repo_name (stringlengths 5-100) | path (stringlengths 4-299) | copies (stringclasses, 990 values) | size (stringlengths 4-7) | content (stringlengths 666-1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
Antiun/odoo | addons/l10n_gt/__openerp__.py | 260 | 2305 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2009-2010 Soluciones Tecnologócias Prisma S.A. All Rights Reserved.
# José Rodrigo Fernández Menegazzo, Soluciones Tecnologócias Prisma S.A.
# (http://www.solucionesprisma.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# This module provides a minimal Guatemalan chart of accounts that can be used
# to build upon a more complex one. It also includes a chart of taxes and
# the Quetzal currency.
#
# This module is based on the UK minimal chart of accounts:
# Copyright (c) 2004-2009 Seath Solutions Ltd. All Rights Reserved.
# Geoff Gardiner, Seath Solutions Ltd (http://www.seathsolutions.com/)
#
# This module works with OpenERP 6.0
#
{
'name': 'Guatemala - Accounting',
'version': '3.0',
'category': 'Localization/Account Charts',
'description': """
This is the base module to manage the accounting chart for Guatemala.
=====================================================================
Agrega una nomenclatura contable para Guatemala. También incluye impuestos y
la moneda del Quetzal. -- Adds accounting chart for Guatemala. It also includes
taxes and the Quetzal currency.""",
    'author': 'José Rodrigo Fernández Menegazzo',
'website': 'http://solucionesprisma.com/',
'depends': ['base', 'account', 'account_chart'],
'data': [
'account_types.xml',
'account_chart.xml',
'account_tax.xml',
'l10n_gt_base.xml',
],
'demo': [],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 4,645,643,458,688,282,000 | 38.62069 | 83 | 0.644909 | false |
Sinkmanu/auth-system-chronos | Chronos Auth System/tools/Chronos Auth System/tools/urwid/font.py | 8 | 25430 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Urwid BigText fonts
# Copyright (C) 2004-2006 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
from __future__ import nested_scopes
import re
from escape import utf8decode, SAFE_ASCII_DEC_SPECIAL_RE
from util import apply_target_encoding, str_util
from canvas import TextCanvas
try: True # old python?
except: False, True = 0, 1
def separate_glyphs(gdata, height):
"""return (dictionary of glyphs, utf8 required)"""
gl = gdata.split("\n")
del gl[0]
del gl[-1]
for g in gl:
assert "\t" not in g
assert len(gl) == height+1, `gdata`
key_line = gl[0]
del gl[0]
c = None # current character
key_index = 0 # index into character key line
end_col = 0 # column position at end of glyph
start_col = 0 # column position at start of glyph
jl = [0]*height # indexes into lines of gdata (gl)
dout = {}
utf8_required = False
while True:
if c is None:
if key_index >= len(key_line):
break
c = key_line[key_index]
if key_index < len(key_line) and key_line[key_index] == c:
end_col += str_util.get_width(ord(c))
key_index += 1
continue
out = []
for k in range(height):
l = gl[k]
j = jl[k]
y = 0
fill = 0
while y < end_col - start_col:
if j >= len(l):
fill = end_col - start_col - y
break
y += str_util.get_width(ord(l[j]))
j += 1
assert y + fill == end_col - start_col, \
`y, fill, end_col`
segment = l[jl[k]:j]
if not SAFE_ASCII_DEC_SPECIAL_RE.match(segment):
utf8_required = True
out.append(segment + " " * fill)
jl[k] = j
start_col = end_col
dout[c] = (y + fill, out)
c = None
return dout, utf8_required
_all_fonts = []
def get_all_fonts():
"""
Return a list of (font name, font class) tuples.
"""
return _all_fonts[:]
def add_font(name, cls):
_all_fonts.append((name, cls))
class Font(object):
def __init__(self):
assert self.height
assert self.data
self.char = {}
self.canvas = {}
self.utf8_required = False
for gdata in self.data:
self.add_glyphs(gdata)
def add_glyphs(self, gdata):
d, utf8_required = separate_glyphs(gdata, self.height)
self.char.update(d)
self.utf8_required |= utf8_required
def characters(self):
l = self.char.keys()
l.sort()
return "".join(l)
def char_width(self, c):
if self.char.has_key(c):
return self.char[c][0]
return 0
def char_data(self, c):
return self.char[c][1]
def render(self, c):
if c in self.canvas:
return self.canvas[c]
width, l = self.char[c]
tl = []
csl = []
for d in l:
t, cs = apply_target_encoding(d)
tl.append(t)
csl.append(cs)
canv = TextCanvas(tl, None, csl, maxcol=width,
check_width=False)
self.canvas[c] = canv
return canv
#safe_palette = utf8decode("βββββΌβββ€β΄β¬β")
#more_palette = utf8decode("βββββββββββββββββ β‘β’β£β€β₯β¦β§β¨β©βͺβ«β¬β")
#block_palette = utf8decode("β#β#β#β#β#β#β#β#β#β#β#β#β#β#β")
class Thin3x3Font(Font):
height = 3
data = [utf8decode("""
000111222333444555666777888999 !
βββ β ββββββ βββ ββ βββββββββ β
β β β βββ ββ€βββΌββββββ βΌβββ€βββ€ β
βββ β΄ ββ βββ β΄ βββββ β΄βββ ββ .
"""), utf8decode(r"""
"###$$$%%%'*++,--.///:;==???[[\\\]]^__`
" βΌβΌββΌβO /' /.. _ββββ \ β^ `
βΌβΌββΌβ / * βΌ β / ., _ βββ \ β
ββΌβ/ O , ./ . β \ β ββ
""")]
add_font("Thin 3x3",Thin3x3Font)
class Thin4x3Font(Font):
height = 3
data = Thin3x3Font.data + [utf8decode("""
0000111122223333444455556666777788889999 ####$$$$
ββββ β ββββββββ ββββ βββ ββββββββββββ βΌββΌββΌβΌβ
β β β ββββ ββ€ββββΌββββββββ βΌββββ€ββββ€ βΌββΌββΌβΌβ
ββββ β΄ βββ ββββ β΄ βββββββ β΄ββββ βββ ββΌβΌβ
""")]
add_font("Thin 4x3",Thin4x3Font)
class HalfBlock5x4Font(Font):
height = 4
data = [utf8decode("""
00000111112222233333444445555566666777778888899999 !!
ββββ ββ ββββ ββββ β β ββββ βββ ββββ ββββ ββββ β
β β β ββ ββ ββββ βββ βββ ββ ββββ ββββ β
β β β ββ β β β β β β β β β β β
ββ βββ ββββ ββ β βββ ββ β ββ ββ β
"""), utf8decode('''
"""######$$$$$$%%%%%&&&&&((()))******++++++,,,-----..////:::;;
βββ β β βββββ ββββ βββ β β β β β ββ
βββββ ββββ β βββ ββ ββ βββββ βββββ ββββ β β β
βββββ β β β βββ β βββββ ββ βββ β ββ β ββ
β β βββ β β ββ β β ββ β β
'''), utf8decode(r"""
<<<<<=====>>>>>?????@@@@@@[[[[\\\\]]]]^^^^____```{{{{||}}}}~~~~''´´´
ββ ββ ββββ βββββ βββ ββ βββ βββ ββ ββ β ββ β β ββ
ββ ββββ ββ ββ β βββ β β β ββ β ββ ββββ
ββ ββββ ββ β β βββ β ββ β β β β β
β β β βββ βββ β βββ ββββ β β β
"""), utf8decode('''
AAAAABBBBBCCCCCDDDDDEEEEEFFFFFGGGGGHHHHHIIJJJJJKKKKK
ββββ ββββ ββββ ββββ ββββ ββββ ββββ β β β β β β
ββββ ββββ β β β βββ βββ β ββββ β β βββ
β β β β β β β β β β β ββ β β β β β β ββ
β β βββ ββ βββ ββββ β ββ β β β ββ β β
'''), utf8decode('''
LLLLLMMMMMMNNNNNOOOOOPPPPPQQQQQRRRRRSSSSSTTTTT
β ββ ββ ββ β ββββ ββββ ββββ ββββ ββββ βββββ
β β β β ββββ β β ββββ β β ββββ βββ β
β β β β ββ β β β β ββ β β β β β
ββββ β β β β ββ β βββ β β ββ β
'''), utf8decode('''
UUUUUVVVVVVWWWWWWXXXXXXYYYYYYZZZZZ
β β β β β β β β β β ββββ
β β ββ ββ β β β βββ βββ ββ
β β β β βββββ ββ ββ β β
ββ β β β β β β ββββ
'''), utf8decode('''
aaaaabbbbbcccccdddddeeeeeffffggggghhhhhiijjjjkkkkk
β β βββ β β β β
βββ ββββ ββββ ββββ ββββ βββ ββββ ββββ β β β ββ
ββββ β β β β β β βββ β ββββ β β β β βββ
βββ βββ ββ βββ ββ β βββ β β β βββ β β
'''), utf8decode('''
llmmmmmmnnnnnooooopppppqqqqqrrrrssssstttt
β β
β βββββ ββββ ββββ ββββ ββββ βββ ββββ βββ
β β β β β β β β β β β β β βββ β
β β β β β ββ βββ βββ β βββ β
'''), utf8decode('''
uuuuuvvvvvwwwwwwxxxxxxyyyyyzzzzz
β β β β β β β ββ ββ β β ββββ
β β ββββ βββββ βββ ββββ ββ
ββ ββ β β β β βββ ββββ
''')]
add_font("Half Block 5x4",HalfBlock5x4Font)
class HalfBlock6x5Font(Font):
height = 5
data = [utf8decode("""
000000111111222222333333444444555555666666777777888888999999 ..::////
βββββ ββ βββββ βββββ β β βββββ ββββ βββββ βββββ βββββ β
β β β β β β β β β ββ β β β β β ββ
β β β ββ βββ βββββ βββββ βββββ β βββββ ββββ β β
β β β ββ β β β β β β ββ β β β ββ
βββ βββ βββββ βββ β ββββ βββ β βββ βββ β β
""")]
add_font("Half Block 6x5",HalfBlock6x5Font)
class HalfBlockHeavy6x5Font(Font):
height = 5
data = [utf8decode("""
000000111111222222333333444444555555666666777777888888999999 ..::////
βββββ βββ βββββ βββββ ββ βββββ βββββ βββββ βββββ βββββ ββ
ββ ββ βββ β ββ β ββ ββ ββ ββ ββ ββ ββ ββ ββ ββ ββ ββ
ββ ββ ββ βββ βββ βββββ βββββ βββββ ββ βββββ βββββ ββ
ββ ββ ββ βββ β ββ ββ ββ ββ ββ ββ ββ ββ ββ ββββ
βββββ ββββ βββββ βββββ ββ βββββ βββββ ββ βββββ βββββ ββ ββ
""")]
add_font("Half Block Heavy 6x5",HalfBlockHeavy6x5Font)
class Thin6x6Font(Font):
height = 6
data = [utf8decode("""
000000111111222222333333444444555555666666777777888888999999''
βββββ β βββββ βββββ β ββββ ββββ βββββ βββββ βββββ β
β β β β β β β β β β β β β β
β / β β βββββ ββ€ ββββΌβ βββββ βββββ βΌ βββββ€ βββββ€
β β β β β β β β β β β β β
βββββ β΄ ββββ βββββ β΄ ββββ βββββ β΄ βββββ ββββ
"""),utf8decode(r'''
!! """######$$$$$$%%%%%%&&&&&&((()))******++++++
β ββ β β βββΌββ ββ / βββ / \
β ββΌββΌβ β β ββ / β β β β \ / β
β β β βββΌββ / ββ\β β β ββXββ βββΌββ
β ββΌββΌβ β β / ββ β \, β β / \ β
. β β βββΌββ / ββ ββββ\ \ /
'''),utf8decode(r"""
,,-----..//////::;;<<<<=====>>>>??????@@@@@@
/ βββββ βββββ
/ . . / ββββ \ β βββββ€
ββββ / / \ βββ ββ β
/ . , \ ββββ / β βββββ
, . / \ / . βββββ
"""),utf8decode(r"""
[[\\\\\\]]^^^____``{{||}}~~~~~~
β \ β /\ \ β β β
β \ β β β β βββ
β \ β β€ β β βββ
β \ β β β β
β \ β ββββ β β β
"""),utf8decode("""
AAAAAABBBBBBCCCCCCDDDDDDEEEEEEFFFFFFGGGGGGHHHHHHIIJJJJJJ
βββββ β¬ββββ βββββ β¬ββββ β¬ββββ β¬ββββ βββββ β¬ β¬ β¬ β¬
β β β β β β β β β β β β β β
βββββ€ βββββ€ β β β βββ βββ β βββ¬ βββββ€ β β
β β β β β β β β β β β β β β β¬ β
β΄ β΄ β΄ββββ βββββ β΄ββββ β΄ββββ β΄ βββββ β΄ β΄ β΄ βββββ
"""),utf8decode("""
KKKKKKLLLLLLMMMMMMNNNNNNOOOOOOPPPPPPQQQQQQRRRRRRSSSSSS
β¬ β¬ β¬ βββ¬ββ β¬ββ β¬ βββββ β¬ββββ βββββ β¬ββββ βββββ
β βββ β β β β β β β β β β β β β β β β
βββ΄β β β β β β β β β β βββββ β β βββ¬ββ βββββ
β ββ β β β β β β β β β β ββ β βββ β
β΄ β΄ β΄ββββ β΄ β΄ β΄ βββ΄ βββββ β΄ ββββΌβ β΄ β΄ βββββ
β
"""),utf8decode("""
TTTTTTUUUUUUVVVVVVWWWWWWXXXXXXYYYYYYZZZZZZ
βββ¬ββ β¬ β¬ β¬ β¬ β¬ β¬ β¬ β¬ β¬ β¬ βββββ
β β β β β β β ββ ββ β β βββ
β β β β β β β β βββ€ βββ¬ββ ββ
β β β ββ ββ β β β ββ ββ β ββ
β΄ βββββ βββ βββ΄ββ β΄ β΄ β΄ βββββ
"""),utf8decode("""
aaaaaabbbbbbccccccddddddeeeeeefffgggggghhhhhhiijjj
βββ
β β β β . .
βββββ βββββ βββββ βββββ€ βββββ βΌ βββββ βββββ β β
βββββ€ β β β β β βββββ β β β β β β β
βββββ΄ βββββ βββββ βββββ βββββ β΄ βββββ€ β΄ β΄ β΄ β
βββββ ββ
"""),utf8decode("""
kkkkkkllmmmmmmnnnnnnooooooppppppqqqqqqrrrrrssssss
β β
β ββ β β¬ββ¬ββ β¬ββββ βββββ βββββ βββββ β¬βββ βββββ
βββ΄β β β β β β β β β β β β β β βββββ
β΄ ββ β β΄ β΄ β΄ β΄ βββββ βββββ βββββ€ β΄ βββββ
β β
"""),utf8decode("""
ttttuuuuuuvvvvvvwwwwwwxxxxxxyyyyyyzzzzzz
β
ββΌβ β¬ β¬ β¬ β¬ β¬ β¬ ββ ββ β¬ β¬ βββββ¬
β β β ββ ββ β β β βββ€ β β βββββ
ββ βββββ΄ βββ βββ΄ββ ββ ββ βββββ€ β΄ββββ
βββββ
""")]
add_font("Thin 6x6",Thin6x6Font)
class HalfBlock7x7Font(Font):
height = 7
data = [utf8decode("""
0000000111111122222223333333444444455555556666666777777788888889999999'''
βββββ βββ βββββ βββββ ββ βββββββ βββββ βββββββ βββββ βββββ ββ
ββ ββ βββ ββ ββββ ββββ ββ ββ ββ ββ ββ ββββ ββββ
ββ β ββ ββ ββ βββ βββββββββββββ ββββββ ββ βββββ ββββββ
ββ β ββ ββ βββ ββ ββ ββββ ββ ββ ββ ββ ββ
ββ ββ ββ βββ ββ ββ ββ ββββ ββ ββ ββ ββ ββ
βββββ ββββ βββββββ βββββ ββ ββββββ βββββ ββ βββββ βββββ
"""),utf8decode('''
!!! """""#######$$$$$$$%%%%%%%&&&&&&&(((())))*******++++++
ββ ββ ββ ββ ββ β β ββ βββ ββββ ββ ββ
ββ ββ βββββββββ βββββ βββββ ββ ββ ββ ββ βββββ ββ
ββ ββ ββ ββββββ β ββ βββ ββ ββ βββββββ βββββ
ββ βββββββ ββββββ ββ β βββββ ββ ββ βββββ ββ
ββ ββ βββββ βββββββ ββ ββ ββ ββ ββ
ββ β ββ β ββββββ ββββ
'''),utf8decode("""
,,,------.../////:::;;;<<<<<<<======>>>>>>>???????@@@@@@@
ββ βββ βββ βββββ βββββ
ββ ββ ββ βββ ββββββ βββ ββ ββββ ββββ
ββββββ ββ βββ βββ ββ βββββββ
ββ ββ ββ βββ ββββββ βββ ββ βββββββ
ββ β βββ βββ ββ βββ
ββ ββ ββ ββ βββββ
β
"""),utf8decode(r"""
[[[[\\\\\]]]]^^^^^^^_____```{{{{{|||}}}}}~~~~~~~´´´
ββββββ ββββ βββ ββ ββββ ββ ββ
ββ ββ ββ ββ ββ ββ ββ ββ ββ ββ ββ
ββ ββ ββββ ββ βββ ββ βββ βββββββ
ββ ββ ββ βββ ββ βββ ββ
ββ ββ ββ ββ ββ ββ
ββββ ββββββ βββββ ββββ ββ
"""),utf8decode("""
AAAAAAABBBBBBBCCCCCCCDDDDDDDEEEEEEEFFFFFFFGGGGGGGHHHHHHHIIIIJJJJJJJ
βββββ ββββββ βββββ ββββββ ββββββββββββββ βββββ ββ ββ βββ ββ
ββ ββββ ββββ ββ ββββ ββ ββ ββ ββ ββ ββ
βββββββββββββ ββ ββ βββββββ βββββ ββ βββββββ ββ ββ
ββ ββββ ββββ ββ ββββ ββ ββ βββββ ββ ββ ββ
ββ ββββ ββββ ββ ββββ ββ ββ ββββ ββ ββ ββ ββ
ββ ββββββββ βββββ ββββββ βββββββββ βββββ ββ ββ βββ βββββ
"""),utf8decode("""
KKKKKKKLLLLLLLMMMMMMMMNNNNNNNOOOOOOOPPPPPPPQQQQQQQRRRRRRRSSSSSSS
ββ ββββ ββββββ βββ ββ βββββ ββββββ βββββ ββββββ βββββ
ββ ββ ββ ββ ββ ββββββ ββββ ββββ ββββ ββββ ββββ
βββββ ββ ββ ββ ββββββ ββββ ββββββββ ββ ββββββββ βββββ
βββββ ββ ββ ββββ ββββββ ββββ ββ ββββ ββ ββ
ββ ββ ββ ββ ββββ ββββββ ββββ ββ ββββββ ββ ββ
ββ βββββββββββ ββββ βββ βββββ ββ βββββ ββ ββ βββββ
ββ
"""),utf8decode("""
TTTTTTTUUUUUUUVVVVVVVWWWWWWWWXXXXXXXYYYYYYYZZZZZZZ
ββββββββ ββββ ββββ ββββ ββ ββ βββββββββ
ββ ββ ββ ββ ββ ββ ββ ββ ββ ββ ββ ββ
ββ ββ ββ ββ ββ ββ ββ βββ βββ ββ
ββ ββ ββ βββ ββ ββ ββ βββ ββ ββ
ββ ββ ββ βββ ββ ββ ββ ββ ββ ββ ββ
ββ βββββ β ββββββ ββ ββ ββ βββββββ
"""),utf8decode("""
aaaaaaabbbbbbbcccccccdddddddeeeeeeefffffggggggghhhhhhhiiijjjj
ββ ββ βββ ββ ββ ββ
ββ ββ ββ ββ
βββββ ββββββ βββββ ββββββ βββββ ββββ βββββ ββββββ βββ βββ
βββββββ ββββ ββ βββββββββ ββ ββ ββββ ββ ββ ββ
βββββββββ ββββ ββ βββββββ ββ βββββββββ ββ ββ ββ
ββββββββββββ βββββ ββββββ βββββ ββ βββββββ ββ ββ ββ
βββββ βββ
"""),utf8decode("""
kkkkkkkllllmmmmmmmmnnnnnnnooooooopppppppqqqqqqqrrrrrrsssssss
ββ ββ
ββ ββ
ββ βββ ββ ββββββ ββββββ βββββ ββββββ ββββββ βββββ βββββ
βββββ ββ ββ ββ ββββ ββββ ββββ ββββ ββββ βββββ
ββββββ ββ ββ ββ ββββ ββββ ββββ ββββ ββββ βββββ
ββ ββ βββββ ββββ ββ βββββ ββββββ ββββββββ βββββ
ββ ββ
"""),utf8decode("""
tttttuuuuuuuvvvvvvvwwwwwwwwxxxxxxxyyyyyyyzzzzzzz
ββ
ββ
ββββββ ββββ ββββ ββββ ββββ βββββββββ
ββ ββ ββ ββ ββ ββ ββ βββββ ββ ββ βββ
ββ ββ ββ βββ ββ ββ ββ βββββ βββββββ βββ
ββ βββββ βββ ββββββ ββ ββ ββββββββββββ
βββββ
""")]
add_font("Half Block 7x7",HalfBlock7x7Font)
if __name__ == "__main__":
l = get_all_fonts()
all_ascii = "".join([chr(x) for x in range(32, 127)])
print "Available Fonts: (U) = UTF-8 required"
print "----------------"
for n,cls in l:
f = cls()
u = ""
if f.utf8_required:
u = "(U)"
print ("%-20s %3s " % (n,u)),
c = f.characters()
if c == all_ascii:
print "Full ASCII"
elif c.startswith(all_ascii):
print "Full ASCII + " + c[len(all_ascii):]
else:
print "Characters: " + c
| gpl-3.0 | -1,933,365,354,575,088,000 | 36.824561 | 78 | 0.270466 | false |
taranjeet/EvalAI | apps/web/migrations/0002_added_team_model.py | 5 | 1151 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-02-06 09:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('web', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Team',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('email', models.EmailField(max_length=70)),
('headshot', models.ImageField(blank=True, null=True, upload_to='headshots')),
('visible', models.BooleanField(default=True)),
('github_url', models.URLField()),
('linkedin_url', models.URLField()),
('personal_website', models.URLField()),
('team_type', models.CharField(choices=[('Core Team', 'Core Team'), ('Contributor', 'Contributor')], max_length=50)),
],
options={
'db_table': 'teams',
},
),
]
| bsd-3-clause | 3,876,430,059,609,022,000 | 34.96875 | 133 | 0.537793 | false |
perdona/sorl-thumbnail | tests/thumbnail_tests/test_parsers.py | 17 | 1229 | # -*- coding: utf-8 -*-
import unittest
from sorl.thumbnail.helpers import ThumbnailError
from sorl.thumbnail.parsers import parse_crop, parse_geometry
class CropParserTestCase(unittest.TestCase):
def test_alias_crop(self):
crop = parse_crop('center', (500, 500), (400, 400))
self.assertEqual(crop, (50, 50))
crop = parse_crop('right', (500, 500), (400, 400))
self.assertEqual(crop, (100, 50))
def test_percent_crop(self):
crop = parse_crop('50% 0%', (500, 500), (400, 400))
self.assertEqual(crop, (50, 0))
crop = parse_crop('10% 80%', (500, 500), (400, 400))
self.assertEqual(crop, (10, 80))
def test_px_crop(self):
crop = parse_crop('200px 33px', (500, 500), (400, 400))
self.assertEqual(crop, (100, 33))
def test_bad_crop(self):
self.assertRaises(ThumbnailError, parse_crop, '-200px', (500, 500), (400, 400))
class GeometryParserTestCase(unittest.TestCase):
def test_geometry(self):
g = parse_geometry('222x30')
self.assertEqual(g, (222, 30))
g = parse_geometry('222')
self.assertEqual(g, (222, None))
g = parse_geometry('x999')
self.assertEqual(g, (None, 999))
| bsd-3-clause | -7,757,403,440,231,434,000 | 33.138889 | 87 | 0.60537 | false |
giorgiop/scikit-learn | examples/mixture/plot_gmm_sin.py | 103 | 6101 | """
=================================
Gaussian Mixture Model Sine Curve
=================================
This example demonstrates the behavior of Gaussian mixture models fit on data
that was not sampled from a mixture of Gaussian random variables. The dataset
is formed by 100 points loosely spaced following a noisy sine curve. There is
therefore no ground truth value for the number of Gaussian components.
The first model is a classical Gaussian Mixture Model with 10 components fit
with the Expectation-Maximization algorithm.
The second model is a Bayesian Gaussian Mixture Model with a Dirichlet process
prior fit with variational inference. The low value of the concentration prior
makes the model favor a lower number of active components. This models
"decides" to focus its modeling power on the big picture of the structure of
the dataset: groups of points with alternating directions modeled by
non-diagonal covariance matrices. Those alternating directions roughly capture
the alternating nature of the original sine signal.
The third model is also a Bayesian Gaussian mixture model with a Dirichlet
process prior but this time the value of the concentration prior is higher
giving the model more liberty to model the fine-grained structure of the data.
The result is a mixture with a larger number of active components that is
similar to the first model where we arbitrarily decided to fix the number of
components to 10.
Which model is the best is a matter of subjective judgement: do we want to
favor models that only capture the big picture to summarize and explain most of
the structure of the data while ignoring the details or do we prefer models
that closely follow the high density regions of the signal?
The last two panels show how we can sample from the last two models. The
resulting samples distributions do not look exactly like the original data
distribution. The difference primarily stems from the approximation error we
made by using a model that assumes that the data was generated by a finite
number of Gaussian components instead of a continuous noisy sine curve.
"""
import itertools
import numpy as np
from scipy import linalg
import matplotlib.pyplot as plt
import matplotlib as mpl
from sklearn import mixture
print(__doc__)
color_iter = itertools.cycle(['navy', 'c', 'cornflowerblue', 'gold',
'darkorange'])
def plot_results(X, Y, means, covariances, index, title):
splot = plt.subplot(5, 1, 1 + index)
for i, (mean, covar, color) in enumerate(zip(
means, covariances, color_iter)):
v, w = linalg.eigh(covar)
v = 2. * np.sqrt(2.) * np.sqrt(v)
u = w[0] / linalg.norm(w[0])
# as the DP will not use every component it has access to
# unless it needs it, we shouldn't plot the redundant
# components.
if not np.any(Y == i):
continue
plt.scatter(X[Y == i, 0], X[Y == i, 1], .8, color=color)
# Plot an ellipse to show the Gaussian component
angle = np.arctan(u[1] / u[0])
angle = 180. * angle / np.pi # convert to degrees
ell = mpl.patches.Ellipse(mean, v[0], v[1], 180. + angle, color=color)
ell.set_clip_box(splot.bbox)
ell.set_alpha(0.5)
splot.add_artist(ell)
plt.xlim(-6., 4. * np.pi - 6.)
plt.ylim(-5., 5.)
plt.title(title)
plt.xticks(())
plt.yticks(())
def plot_samples(X, Y, n_components, index, title):
plt.subplot(5, 1, 4 + index)
for i, color in zip(range(n_components), color_iter):
# as the DP will not use every component it has access to
# unless it needs it, we shouldn't plot the redundant
# components.
if not np.any(Y == i):
continue
plt.scatter(X[Y == i, 0], X[Y == i, 1], .8, color=color)
plt.xlim(-6., 4. * np.pi - 6.)
plt.ylim(-5., 5.)
plt.title(title)
plt.xticks(())
plt.yticks(())
# Parameters
n_samples = 100
# Generate random sample following a sine curve
np.random.seed(0)
X = np.zeros((n_samples, 2))
step = 4. * np.pi / n_samples
for i in range(X.shape[0]):
x = i * step - 6.
X[i, 0] = x + np.random.normal(0, 0.1)
X[i, 1] = 3. * (np.sin(x) + np.random.normal(0, .2))
plt.figure(figsize=(10, 10))
plt.subplots_adjust(bottom=.04, top=0.95, hspace=.2, wspace=.05,
left=.03, right=.97)
# Fit a Gaussian mixture with EM using ten components
gmm = mixture.GaussianMixture(n_components=10, covariance_type='full',
max_iter=100).fit(X)
plot_results(X, gmm.predict(X), gmm.means_, gmm.covariances_, 0,
'Expectation-maximization')
dpgmm = mixture.BayesianGaussianMixture(
n_components=10, covariance_type='full', weight_concentration_prior=1e-2,
weight_concentration_prior_type='dirichlet_process',
mean_precision_prior=1e-2, covariance_prior=1e0 * np.eye(2),
init_params="random", max_iter=100, random_state=2).fit(X)
plot_results(X, dpgmm.predict(X), dpgmm.means_, dpgmm.covariances_, 1,
"Bayesian Gaussian mixture models with a Dirichlet process prior "
r"for $\gamma_0=0.01$.")
X_s, y_s = dpgmm.sample(n_samples=2000)
plot_samples(X_s, y_s, dpgmm.n_components, 0,
"Gaussian mixture with a Dirichlet process prior "
r"for $\gamma_0=0.01$ sampled with $2000$ samples.")
dpgmm = mixture.BayesianGaussianMixture(
n_components=10, covariance_type='full', weight_concentration_prior=1e+2,
weight_concentration_prior_type='dirichlet_process',
mean_precision_prior=1e-2, covariance_prior=1e0 * np.eye(2),
init_params="kmeans", max_iter=100, random_state=2).fit(X)
plot_results(X, dpgmm.predict(X), dpgmm.means_, dpgmm.covariances_, 2,
"Bayesian Gaussian mixture models with a Dirichlet process prior "
r"for $\gamma_0=100$")
X_s, y_s = dpgmm.sample(n_samples=2000)
plot_samples(X_s, y_s, dpgmm.n_components, 1,
"Gaussian mixture with a Dirichlet process prior "
r"for $\gamma_0=100$ sampled with $2000$ samples.")
plt.show()
| bsd-3-clause | 1,629,001,318,295,124,200 | 38.616883 | 79 | 0.673988 | false |
eatbyte/depot_tools | third_party/boto/auth_handler.py | 106 | 2059 | # Copyright 2010 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Defines an interface which all Auth handlers need to implement.
"""
from plugin import Plugin
class NotReadyToAuthenticate(Exception):
pass
class AuthHandler(Plugin):
capability = []
def __init__(self, host, config, provider):
"""Constructs the handlers.
:type host: string
:param host: The host to which the request is being sent.
:type config: boto.pyami.Config
:param config: Boto configuration.
:type provider: boto.provider.Provider
:param provider: Provider details.
Raises:
NotReadyToAuthenticate: if this handler is not willing to
authenticate for the given provider and config.
"""
pass
def add_auth(self, http_request):
"""Invoked to add authentication details to request.
:type http_request: boto.connection.HTTPRequest
:param http_request: HTTP request that needs to be authenticated.
"""
pass
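# Illustrative sketch (not part of boto): a minimal concrete handler built on the
# interface above. The capability name, the header name and the use of the
# provider's secret_key attribute are assumptions made for demonstration only.
class ExampleTokenAuthHandler(AuthHandler):
    """Sign requests by attaching a single opaque token header."""
    capability = ['example-token']
    def __init__(self, host, config, provider):
        AuthHandler.__init__(self, host, config, provider)
        # A handler should refuse to be used when the credentials it needs
        # are not available for this provider/config combination.
        self._token = getattr(provider, 'secret_key', None)
        if not self._token:
            raise NotReadyToAuthenticate()
    def add_auth(self, http_request):
        # Attach the credential to the outgoing request before it is sent.
        http_request.headers['X-Example-Token'] = self._token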
| bsd-3-clause | -9,181,785,203,906,295,000 | 34.5 | 74 | 0.709082 | false |
alexallah/django | django/middleware/clickjacking.py | 82 | 1733 | """
Clickjacking Protection Middleware.
This module provides a middleware that implements protection against a
malicious site loading resources from your site in a hidden frame.
"""
from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
class XFrameOptionsMiddleware(MiddlewareMixin):
"""
Set the X-Frame-Options HTTP header in HTTP responses.
Do not set the header if it's already set or if the response contains
a xframe_options_exempt value set to True.
By default, set the X-Frame-Options header to 'SAMEORIGIN', meaning the
response can only be loaded on a frame within the same site. To prevent the
response from being loaded in a frame in any site, set X_FRAME_OPTIONS in
your project's Django settings to 'DENY'.
"""
def process_response(self, request, response):
# Don't set it if it's already in the response
if response.get('X-Frame-Options') is not None:
return response
# Don't set it if they used @xframe_options_exempt
if getattr(response, 'xframe_options_exempt', False):
return response
response['X-Frame-Options'] = self.get_xframe_options_value(request,
response)
return response
def get_xframe_options_value(self, request, response):
"""
Get the value to set for the X_FRAME_OPTIONS header. Use the value from
the X_FRAME_OPTIONS setting, or 'SAMEORIGIN' if not set.
This method can be overridden if needed, allowing it to vary based on
the request or response.
"""
return getattr(settings, 'X_FRAME_OPTIONS', 'SAMEORIGIN').upper()
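# Illustrative sketch (not part of this module): with the middleware listed in a
# project's MIDDLEWARE setting and X_FRAME_OPTIONS = 'DENY' in settings, single
# views can still opt out through the companion decorator. The view below is a
# made-up example.
from django.http import HttpResponse
from django.views.decorators.clickjacking import xframe_options_exempt
@xframe_options_exempt
def example_embeddable_view(request):
    # Because of the decorator, process_response() above sees
    # xframe_options_exempt == True and adds no X-Frame-Options header.
    return HttpResponse("This page may be rendered inside a frame.")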
| bsd-3-clause | -305,797,511,004,002,940 | 37.511111 | 79 | 0.669359 | false |
fbellini/AliPhysics | PWGJE/EMCALJetTasks/Tracks/analysis/base/struct/DataContainers.py | 41 | 12989 | #**************************************************************************
#* Copyright(c) 1998-2014, ALICE Experiment at CERN, All rights reserved. *
#* *
#* Author: The ALICE Off-line Project. *
#* Contributors are mentioned in the code where appropriate. *
#* *
#* Permission to use, copy, modify and distribute this software and its *
#* documentation strictly for non-commercial purposes is hereby granted *
#* without fee, provided that the above copyright notice appears in all *
#* copies and that both the copyright notice and this permission notice *
#* appear in the supporting documentation. The authors make no claims *
#* about the suitability of this software for any purpose. It is *
#* provided "as is" without express or implied warranty. *
#**************************************************************************
from PWGJE.EMCALJetTasks.Tracks.analysis.base.Helper import NormaliseBinWidth
from PWGJE.EMCALJetTasks.Tracks.analysis.base.struct.ClusterTHnSparse import ClusterTHnSparseNew, ClusterTHnSparseOld
from PWGJE.EMCALJetTasks.Tracks.analysis.base.struct.TrackTHnSparse import TrackTHnSparseNew, TrackTHnSparseOld
from PWGJE.EMCALJetTasks.Tracks.analysis.base.MergeException import MergeException
from copy import copy,deepcopy
from ROOT import TList
class DataContainer(object):
"""
Data container (event and spectum container)
"""
class DataException(Exception):
"""
Exception handling incomplete data
"""
def __init__(self, raiseobject):
"""
constructor
"""
self.__object = raiseobject
def __str__(self):
"""
Produce error string
"""
            return "Container %s missing" %(self.__object)
def __init__(self, eventHist = None, dataHist = None, histotype=None):
"""
Construct spectrum container
"""
self._events = eventHist
self._spectrum = dataHist
self.__doNormBW = True
self._datahistname = "DataHist"
def __copy__(self):
"""
Shallow copy constructor
"""
print "Simple copy called from %s" %(self.__class__)
        newobject = DataContainer(self._events, self._spectrum, None)
newobject._Copy(self)
return newobject
def __deepcopy__(self, memo):
"""
Deep copy constructor
"""
print "deep copy called from %s" %(self.__class__)
newobject = DataContainer(None, None, None)
newobject._Deepcopy(self, memo)
return newobject
def GetValues(self):
"""
For copy constructors
"""
return {"dn": self.__doNormBW, "hn": self._datahistname}
def __SetValues(self, values):
"""
For copy constructors
"""
self.__doNormBW = values["dn"]
self._datahistname = values["hn"]
def SetEventHist(self, eventHist):
"""
Set the event hist
"""
self._events = eventHist
def SetSpectrumContainer(self, cont):
"""
Initialise spectrum with container
"""
self._spectrum = cont
def GetEventHist(self):
"""
Access event counter histogram
"""
return self._events
def GetSpectrumContainer(self):
"""
Access underlying spectrum container
"""
        return self._spectrum
# Property definitions
EventHist = property(GetEventHist, fset=SetEventHist)
SpectrumHist = property(GetSpectrumContainer, fset=SetSpectrumContainer)
def _Copy(self, other):
"""
Make a shallow copy of the other object into this
"""
evhist = other.EventHist
speccont = other.SpectrumHist
if evhist:
self.EventHist = copy(evhist)
if speccont:
self.SpectrumHist = copy(speccont)
self.__SetValues(other.GetValues())
def _Deepcopy(self, other, memo):
"""
Make a deep copy of the other object into this
"""
evhist = other.EventHist
speccont = other.SpectrumHist
if evhist:
self.EventHist = deepcopy(evhist, memo)
if speccont:
self.SpectrumHist = deepcopy(speccont, memo)
self.__SetValues(other.GetValues())
def Add(self, other):
"""
Add other data container to this data container
"""
if not isinstance(other, DataContainer):
raise MergeException("Incompatible types: this(DataContainer), other(%s)" %(str(other.__class__)))
if self._events and other.EventHist:
self._events.Add(other.EventHist)
else:
if other.EventHist:
self._events = deepcopy(other.EventHist)
if self._spectrum and other.SpectrumHist:
self._spectrum.Add(other.SpectrumHist)
else:
if other.SpectrumHist:
self._spectrum = other.SpectrumHist
def Scale(self, scalefactor):
"""
Scale the underlying spectrum container with the scale factor
"""
        self._spectrum.Scale(scalefactor)
def GetRootPrimitive(self, name):
"""
Convert object to a root primitive (so that it can be wirtten to a rootfile)
"""
result = TList()
result.SetName(name)
events = self._events.GetROOTHisto()
events.SetName("events")
result.Add(events)
        spectrum = self._spectrum.GetHistogram()
spectrum.SetName("spectrum")
result.Add(spectrum)
return result
@staticmethod
def GetHistoPair(rootprimitive):
"""
Convert to TList to python dictionary
"""
return {"events":rootprimitive.FindObject("events"), "spectrum":rootprimitive.FindObject("spectrum")}
def GetEventCount(self):
"""
Get the number of selected events
"""
return self._events.GetEventCount()
def MakeProjection(self, dim, histname = None, xtitle = None, ytitle = None, doNorm = True):
"""
Make event-normalised projection to 1D
"""
if not self._spectrum:
raise DataContainer.DataException(self._datahistname)
if not self._events:
raise DataContainer.DataException("EventHist")
if not histname:
histname = "%sProjected" %(self._spectrum.GetHistogramName())
if not xtitle:
xtitle = ""
if not ytitle:
ytitle = ""
projected = self._spectrum.Projection1D(histname, self._spectrum.GetAxisDefinition().GetAxisName(dim))
projected.GetXaxis().SetTitle(xtitle)
projected.GetYaxis().SetTitle(ytitle)
projected.Sumw2()
if doNorm:
NormaliseBinWidth(projected)
# Normalise by number of events
projected.Scale(1./self.GetEventCount())
return projected
def Print(self):
"""
Print status of the data container
"""
statusEvents = "no"
if self._events:
statusEvents = "yes"
statusSpectrum = "no"
if self._spectrum:
statusSpectrum = "yes"
print "Datacontainer status: events: %s , Spectrum: %s" %(statusEvents, statusSpectrum)
if self._spectrum:
self._spectrum.Print()
class TrackContainer(DataContainer):
"""
Data representation of track specific histograms
"""
def __init__(self, eventHist = None, trackHist = None, dataformat = "new"):
"""
Constructor, initialising base class and additional data members
"""
trackwrapper = None
if trackHist:
if dataformat == "new":
trackwrapper = TrackTHnSparseNew(trackHist)
else:
trackwrapper = TrackTHnSparseOld(trackHist)
DataContainer.__init__(self, eventHist, trackwrapper, "tracks")
self._datahistname = "TrackHist"
def SetVertexRange(self, minv, maxv):
"""
Apply vertex selection both to the event counter and the track hist
"""
if self._spectrum:
self._spectrum.SetVertexCut(minv, maxv)
if self._events:
self._events.SetVertexRange(minv, maxv)
def SetEtaRange(self, etamin, etamax):
"""
Select tracks in a given eta range
"""
if self._spectrum:
self._spectrum.SetEtaCut(etamin, etamax)
def SetPhiRange(self, phimin, phimax):
"""
        Select tracks in a given phi range
"""
if self._spectrum:
self._spectrum.SetPhiCut(phimin, phimax)
def SetPileupRejection(self, on):
"""
Apply pileup rejection (yes or no)
"""
self._events.SetUsePileupRejected(on)
if self._spectrum:
self._spectrum.SetPileupRejection(on)
def SelectTrackCuts(self, cutID):
"""
Select a set of track cuts
"""
if self._spectrum:
self._spectrum.SelectTrackCuts(cutID)
def RequestSeenInMinBias(self):
if self._spectrum:
self._spectrum.SetRequestSeenInMB()
def __copy__(self):
"""
Shallow copy constructor
"""
print "Simple copy called from %s" %(self.__class__)
newobject = TrackContainer(self._events, self._spectrum, None)
newobject._Copy(self)
return newobject
def __deepcopy__(self, memo):
"""
Deep copy constructor
"""
print "deep copy called from %s" %(self.__class__)
newobject = TrackContainer(None, None, None)
newobject._Deepcopy(self, memo)
return newobject
@staticmethod
def BuildFromRootPrimitive(rootprimitive):
"""
Build a cluster container from a root primitive
"""
infopair = DataContainer.GetHistoPair(rootprimitive)
return TrackContainer(infopair["events"], infopair["spectrum"])
class ClusterContainer(DataContainer):
"""
Data representation of cluster specific histograms
"""
def __init__(self, eventHist = None, clusterHist = None, dataformat = "new"):
"""
Constructor, initialising base class and additional data members
"""
clusterwrapper = None
if dataformat == "new":
clusterwrapper = ClusterTHnSparseNew(clusterHist)
else:
clusterwrapper = ClusterTHnSparseOld(clusterHist)
DataContainer.__init__(self, eventHist, clusterwrapper, "clusters")
self._datahistname = "ClusterHist"
def SetVertexRange(self, minv, maxv):
"""
Apply vertex selection both to the event counter and the track hist
"""
if self._spectrum:
self._spectrum.SetVertexCut(minv, maxv)
if self._events:
self._events.SetVertexRange(minv, maxv)
def SetEtaRange(self, etamin, etamax):
"""
        Select clusters in a given eta range
"""
if self._spectrum:
self._spectrum.SetEtaCut(etamin, etamax)
    def SetPhiRange(self, phimin, phimax):
        """
        Select clusters in a given phi range
"""
if self._spectrum:
self._spectrum.SetPhiCut(phimin, phimax)
def SetPileupRejection(self, on):
"""
Apply pileup rejection (yes or no)
"""
self._events.SetUsePileupRejected(on)
self._spectrum.SetPileupRejection(on)
def RequestSeenInMinBias(self):
if self._spectrum:
self._spectrum.SetRequestSeenInMB()
def __copy__(self):
"""
Shallow copy constructor
"""
print "Simple copy called from %s" %(self.__class__)
newobject = ClusterContainer(self._events, self._spectrum, None)
newobject._Copy(self)
return newobject
def __deepcopy__(self, memo):
"""
Deep copy constructor
"""
print "deep copy called from %s" %(self.__class__)
newobject = ClusterContainer(None, None, None)
newobject._Deepcopy(self, memo)
return newobject
@staticmethod
def BuildFromRootPrimitive(rootprimitive):
"""
Build a cluster container from a root primitive
"""
infopair = DataContainer.GetHistoPair(rootprimitive)
return ClusterContainer(infopair["events"], infopair["spectrum"])
| bsd-3-clause | -3,446,530,416,506,553,000 | 32.476804 | 117 | 0.564478 | false |
sublime1809/django | django/core/management/utils.py | 20 | 2832 | from __future__ import unicode_literals
import os
from subprocess import PIPE, Popen
import sys
from django.utils.encoding import force_text, DEFAULT_LOCALE_ENCODING
from django.utils import six
from .base import CommandError
def popen_wrapper(args, os_err_exc_type=CommandError):
"""
Friendly wrapper around Popen.
Returns stdout output, stderr output and OS status code.
"""
try:
p = Popen(args, shell=False, stdout=PIPE, stderr=PIPE,
close_fds=os.name != 'nt', universal_newlines=True)
except OSError as e:
strerror = force_text(e.strerror, DEFAULT_LOCALE_ENCODING,
strings_only=True)
six.reraise(os_err_exc_type, os_err_exc_type('Error executing %s: %s' %
(args[0], strerror)), sys.exc_info()[2])
output, errors = p.communicate()
return (
output,
force_text(errors, DEFAULT_LOCALE_ENCODING, strings_only=True),
p.returncode
)
def handle_extensions(extensions=('html',), ignored=('py',)):
"""
Organizes multiple extensions that are separated with commas or passed by
using --extension/-e multiple times. Note that the .py extension is ignored
here because of the way non-*.py files are handled in make_messages() (they
are copied to file.ext.py files to trick xgettext to parse them as Python
files).
For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
would result in an extension list: ['.js', '.txt', '.xhtml']
>>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
set(['.html', '.js'])
>>> handle_extensions(['.html, txt,.tpl'])
set(['.html', '.tpl', '.txt'])
"""
ext_list = []
for ext in extensions:
ext_list.extend(ext.replace(' ', '').split(','))
for i, ext in enumerate(ext_list):
if not ext.startswith('.'):
ext_list[i] = '.%s' % ext_list[i]
return set(x for x in ext_list if x.strip('.') not in ignored)
def find_command(cmd, path=None, pathext=None):
if path is None:
path = os.environ.get('PATH', '').split(os.pathsep)
if isinstance(path, six.string_types):
path = [path]
# check if there are funny path extensions for executables, e.g. Windows
if pathext is None:
pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD').split(os.pathsep)
# don't use extensions if the command ends with one of them
for ext in pathext:
if cmd.endswith(ext):
pathext = ['']
break
# check if we find the command on PATH
for p in path:
f = os.path.join(p, cmd)
if os.path.isfile(f):
return f
for ext in pathext:
fext = f + ext
if os.path.isfile(fext):
return fext
return None
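# Illustrative sketch (not part of Django): how the helpers above are typically
# combined. The program name 'xgettext' is only an example.
def _example_usage():
    xgettext = find_command('xgettext')
    if xgettext is None:
        raise CommandError("Can't find xgettext on PATH.")
    # Run the program, collecting stdout, stderr and the exit status.
    output, errors, status = popen_wrapper([xgettext, '--version'])
    # Normalize mixed extension arguments into a set such as
    # set(['.html', '.txt', '.xml']); '.py' is always ignored.
    extensions = handle_extensions(['html,txt', '.xml', 'py'])
    return output, status, extensions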
| bsd-3-clause | -6,176,149,575,796,016,000 | 33.536585 | 84 | 0.606992 | false |
exploreodoo/datStruct | odoo/addons/crm/__openerp__.py | 258 | 4199 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'CRM',
'version': '1.0',
'category': 'Customer Relationship Management',
'sequence': 2,
'summary': 'Leads, Opportunities, Phone Calls',
'description': """
The generic OpenERP Customer Relationship Management
====================================================
This application enables a group of people to intelligently and efficiently manage leads, opportunities, meetings and phone calls.
It manages key tasks such as communication, identification, prioritization, assignment, resolution and notification.
OpenERP ensures that all cases are successfully tracked by users, customers and suppliers. It can automatically send reminders, escalate the request, trigger specific methods and many other actions based on your own enterprise rules.
The greatest thing about this system is that users don't need to do anything special. The CRM module has an email gateway for the synchronization interface between mails and OpenERP. That way, users can just send emails to the request tracker.
OpenERP will take care of thanking them for their message, automatically routing it to the appropriate staff and make sure all future correspondence gets to the right place.
Dashboard for CRM will include:
-------------------------------
* Planned Revenue by Stage and User (graph)
* Opportunities by Stage (graph)
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/crm',
'depends': [
'base_action_rule',
'base_setup',
'sales_team',
'mail',
'email_template',
'calendar',
'resource',
'board',
'fetchmail',
],
'data': [
'crm_data.xml',
'crm_lead_data.xml',
'crm_phonecall_data.xml',
'security/crm_security.xml',
'security/ir.model.access.csv',
'wizard/crm_lead_to_opportunity_view.xml',
'wizard/crm_phonecall_to_phonecall_view.xml',
'wizard/crm_merge_opportunities_view.xml',
'crm_view.xml',
'crm_phonecall_view.xml',
'crm_phonecall_menu.xml',
'crm_lead_view.xml',
'crm_lead_menu.xml',
'calendar_event_menu.xml',
'report/crm_lead_report_view.xml',
'report/crm_opportunity_report_view.xml',
'report/crm_phonecall_report_view.xml',
'res_partner_view.xml',
'res_config_view.xml',
'base_partner_merge_view.xml',
'sales_team_view.xml',
],
'demo': [
'crm_demo.xml',
'crm_lead_demo.xml',
'crm_phonecall_demo.xml',
'crm_action_rule_demo.xml',
],
'test': [
'test/crm_access_group_users.yml',
'test/crm_lead_message.yml',
'test/lead2opportunity2win.yml',
'test/lead2opportunity_assign_salesmen.yml',
'test/crm_lead_merge.yml',
'test/crm_lead_cancel.yml',
'test/segmentation.yml',
'test/phonecalls.yml',
'test/crm_lead_onchange.yml',
'test/crm_lead_copy.yml',
'test/crm_lead_unlink.yml',
'test/crm_lead_find_stage.yml',
],
'installable': True,
'application': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| gpl-2.0 | -520,908,180,522,493,950 | 33.702479 | 243 | 0.618004 | false |
nilsonmorales/Puppyes-nightrc | usr/local/lib/python2.7/dist-packages/youtube_dl/extractor/ard.py | 6 | 2195 | import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
)
class ARDIE(InfoExtractor):
_VALID_URL = r'^(?:https?://)?(?:(?:www\.)?ardmediathek\.de|mediathek\.daserste\.de)/(?:.*/)(?P<video_id>[^/\?]+)(?:\?.*)?'
_TITLE = r'<h1(?: class="boxTopHeadline")?>(?P<title>.*)</h1>'
_MEDIA_STREAM = r'mediaCollection\.addMediaStream\((?P<media_type>\d+), (?P<quality>\d+), "(?P<rtmp_url>[^"]*)", "(?P<video_url>[^"]*)", "[^"]*"\)'
_TEST = {
u'url': u'http://www.ardmediathek.de/das-erste/tagesschau-in-100-sek?documentId=14077640',
u'file': u'14077640.mp4',
u'md5': u'6ca8824255460c787376353f9e20bbd8',
u'info_dict': {
u"title": u"11.04.2013 09:23 Uhr - Tagesschau in 100 Sekunden"
},
u'skip': u'Requires rtmpdump'
}
def _real_extract(self, url):
# determine video id from url
m = re.match(self._VALID_URL, url)
numid = re.search(r'documentId=([0-9]+)', url)
if numid:
video_id = numid.group(1)
else:
video_id = m.group('video_id')
# determine title and media streams from webpage
html = self._download_webpage(url, video_id)
title = re.search(self._TITLE, html).group('title')
streams = [mo.groupdict() for mo in re.finditer(self._MEDIA_STREAM, html)]
if not streams:
assert '"fsk"' in html
raise ExtractorError(u'This video is only available after 8:00 pm')
# choose default media type and highest quality for now
stream = max([s for s in streams if int(s["media_type"]) == 0],
key=lambda s: int(s["quality"]))
# there's two possibilities: RTMP stream or HTTP download
info = {'id': video_id, 'title': title, 'ext': 'mp4'}
if stream['rtmp_url']:
self.to_screen(u'RTMP download detected')
assert stream['video_url'].startswith('mp4:')
info["url"] = stream["rtmp_url"]
info["play_path"] = stream['video_url']
else:
assert stream["video_url"].endswith('.mp4')
info["url"] = stream["video_url"]
return [info]
| gpl-3.0 | -4,853,257,961,968,733,000 | 39.648148 | 151 | 0.558542 | false |
Atlas7/deepdive | examples/tutorial_example/step3-more-data/experiment-reports/v00001/code/udf/ext_people.py | 161 | 1752 | #! /usr/bin/env python
# Sample input data (piped into STDIN):
'''
118238@10 Sen.~^~Barack~^~Obama~^~and~^~his~^~wife~^~,~^~Michelle~^~Obama~^~,~^~have~^~released~^~eight~^~years~^~of~^~joint~^~returns~^~. O~^~PERSON~^~PERSON~^~O~^~O~^~O~^~O~^~PERSON~^~PERSON~^~O~^~O~^~O~^~DURATION~^~DURATION~^~O~^~O~^~O~^~O
118238@12 During~^~the~^~2004~^~presidential~^~campaign~^~,~^~we~^~urged~^~Teresa~^~Heinz~^~Kerry~^~,~^~the~^~wealthy~^~wife~^~of~^~Sen.~^~John~^~Kerry~^~,~^~to~^~release~^~her~^~tax~^~returns~^~. O~^~O~^~DATE~^~O~^~O~^~O~^~O~^~O~^~PERSON~^~PERSON~^~PERSON~^~O~^~O~^~O~^~O~^~O~^~O~^~PERSON~^~PERSON~^~O~^~O~^~O~^~O~^~O~^~O~^~O
'''
import sys
ARR_DELIM = '~^~'
# For-loop for each row in the input query
for row in sys.stdin:
# Find phrases that are continuous words tagged with PERSON.
sentence_id, words_str, ner_tags_str = row.strip().split('\t')
words = words_str.split(ARR_DELIM)
ner_tags = ner_tags_str.split(ARR_DELIM)
start_index = 0
phrases = []
while start_index < len(words):
# Checking if there is a PERSON phrase starting from start_index
index = start_index
while index < len(words) and ner_tags[index] == "PERSON":
index += 1
if index != start_index: # found a person from "start_index" to "index"
text = ' '.join(words[start_index:index])
length = index - start_index
phrases.append((start_index, length, text))
start_index = index + 1
# Output a tuple for each PERSON phrase
for start_position, length, text in phrases:
print '\t'.join(
[ str(x) for x in [
sentence_id,
start_position, # start_position
length, # length
text, # text
'%s_%d' % (sentence_id, start_position) # mention_id
]])
| apache-2.0 | 713,878,411,720,616,000 | 40.714286 | 326 | 0.590183 | false |
MadCat34/Sick-Beard | lib/requests/packages/oauthlib/oauth2/draft25/__init__.py | 74 | 22862 | """
oauthlib.oauth2.draft_25
~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 2.0 draft 25 requests.
"""
from tokens import prepare_bearer_uri, prepare_bearer_headers
from tokens import prepare_bearer_body, prepare_mac_header
from parameters import prepare_grant_uri, prepare_token_request
from parameters import parse_authorization_code_response
from parameters import parse_implicit_response, parse_token_response
AUTH_HEADER = u'auth_header'
URI_QUERY = u'query'
BODY = u'body'
class Client(object):
def __init__(self, client_id,
default_redirect_uri=None,
token_type=None,
access_token=None,
refresh_token=None):
"""Initialize a client with commonly used attributes."""
self.client_id = client_id
self.default_redirect_uri = default_redirect_uri
self.token_type = token_type
self.access_token = access_token
self.refresh_token = refresh_token
self.token_types = {
u'bearer': self._add_bearer_token,
u'mac': self._add_mac_token
}
def add_token(self, uri, http_method=u'GET', body=None, headers=None,
token_placement=AUTH_HEADER):
"""Add token to the request uri, body or authorization header.
The access token type provides the client with the information
required to successfully utilize the access token to make a protected
resource request (along with type-specific attributes). The client
MUST NOT use an access token if it does not understand the token
type.
For example, the "bearer" token type defined in
[I-D.ietf-oauth-v2-bearer] is utilized by simply including the access
token string in the request:
GET /resource/1 HTTP/1.1
Host: example.com
Authorization: Bearer mF_9.B5f-4.1JqM
while the "mac" token type defined in [I-D.ietf-oauth-v2-http-mac] is
utilized by issuing a MAC key together with the access token which is
used to sign certain components of the HTTP requests:
GET /resource/1 HTTP/1.1
Host: example.com
Authorization: MAC id="h480djs93hd8",
nonce="274312:dj83hs9s",
mac="kDZvddkndxvhGRXZhvuDjEWhGeE="
.. _`I-D.ietf-oauth-v2-bearer`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#ref-I-D.ietf-oauth-v2-bearer
.. _`I-D.ietf-oauth-v2-http-mac`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#ref-I-D.ietf-oauth-v2-http-mac
"""
return self.token_types[self.token_type](uri, http_method, body,
headers, token_placement)
def prepare_refresh_body(self, body=u'', refresh_token=None, scope=None):
"""Prepare an access token request, using a refresh token.
If the authorization server issued a refresh token to the client, the
client makes a refresh request to the token endpoint by adding the
following parameters using the "application/x-www-form-urlencoded"
format in the HTTP request entity-body:
grant_type
REQUIRED. Value MUST be set to "refresh_token".
refresh_token
REQUIRED. The refresh token issued to the client.
scope
OPTIONAL. The scope of the access request as described by
Section 3.3. The requested scope MUST NOT include any scope
not originally granted by the resource owner, and if omitted is
treated as equal to the scope originally granted by the
resource owner.
"""
refresh_token = refresh_token or self.refresh_token
return prepare_token_request(u'refresh_token', body=body, scope=scope,
refresh_token=refresh_token)
def _add_bearer_token(self, uri, http_method=u'GET', body=None,
headers=None, token_placement=AUTH_HEADER):
"""Add a bearer token to the request uri, body or authorization header."""
if token_placement == AUTH_HEADER:
headers = prepare_bearer_headers(self.token, headers)
if token_placement == URI_QUERY:
uri = prepare_bearer_uri(self.token, uri)
if token_placement == BODY:
body = prepare_bearer_body(self.token, body)
return uri, headers, body
def _add_mac_token(self, uri, http_method=u'GET', body=None,
headers=None, token_placement=AUTH_HEADER):
"""Add a MAC token to the request authorization header."""
headers = prepare_mac_header(self.token, uri, self.key, http_method,
headers=headers, body=body, ext=self.ext,
hash_algorithm=self.hash_algorithm)
return uri, headers, body
def _populate_attributes(self, response):
"""Add commonly used values such as access_token to self."""
if u'access_token' in response:
self.access_token = response.get(u'access_token')
if u'refresh_token' in response:
self.refresh_token = response.get(u'refresh_token')
if u'token_type' in response:
self.token_type = response.get(u'token_type')
if u'expires_in' in response:
self.expires_in = response.get(u'expires_in')
if u'code' in response:
self.code = response.get(u'code')
def prepare_request_uri(self, *args, **kwargs):
"""Abstract method used to create request URIs."""
raise NotImplementedError("Must be implemented by inheriting classes.")
def prepare_request_body(self, *args, **kwargs):
"""Abstract method used to create request bodies."""
raise NotImplementedError("Must be implemented by inheriting classes.")
def parse_request_uri_response(self, *args, **kwargs):
"""Abstract method used to parse redirection responses."""
def parse_request_body_response(self, *args, **kwargs):
"""Abstract method used to parse JSON responses."""
class WebApplicationClient(Client):
"""A client utilizing the authorization code grant workflow.
A web application is a confidential client running on a web
server. Resource owners access the client via an HTML user
interface rendered in a user-agent on the device used by the
resource owner. The client credentials as well as any access
token issued to the client are stored on the web server and are
not exposed to or accessible by the resource owner.
The authorization code grant type is used to obtain both access
tokens and refresh tokens and is optimized for confidential clients.
As a redirection-based flow, the client must be capable of
interacting with the resource owner's user-agent (typically a web
browser) and capable of receiving incoming requests (via redirection)
from the authorization server.
"""
def prepare_request_uri(self, uri, redirect_uri=None, scope=None,
state=None, **kwargs):
"""Prepare the authorization code request URI
The client constructs the request URI by adding the following
parameters to the query component of the authorization endpoint URI
using the "application/x-www-form-urlencoded" format as defined by
[`W3C.REC-html401-19991224`_]:
response_type
REQUIRED. Value MUST be set to "code".
client_id
REQUIRED. The client identifier as described in `Section 2.2`_.
redirect_uri
OPTIONAL. As described in `Section 3.1.2`_.
scope
OPTIONAL. The scope of the access request as described by
`Section 3.3`_.
state
RECOMMENDED. An opaque value used by the client to maintain
state between the request and callback. The authorization
server includes this value when redirecting the user-agent back
to the client. The parameter SHOULD be used for preventing
cross-site request forgery as described in `Section 10.12`_.
.. _`W3C.REC-html401-19991224`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#ref-W3C.REC-html401-19991224
.. _`Section 2.2`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-2.2
.. _`Section 3.1.2`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-3.1.2
.. _`Section 3.3`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-3.3
.. _`Section 10.12`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-10.12
"""
redirect_uri = redirect_uri or self.default_redirect_uri
return prepare_grant_uri(uri, self.client_id, u'code',
redirect_uri=redirect_uri, scope=scope, state=state, **kwargs)
def prepare_request_body(self, code, body=u'', redirect_uri=None, **kwargs):
"""Prepare the access token request body.
The client makes a request to the token endpoint by adding the
following parameters using the "application/x-www-form-urlencoded"
format in the HTTP request entity-body:
grant_type
REQUIRED. Value MUST be set to "authorization_code".
code
REQUIRED. The authorization code received from the
authorization server.
redirect_uri
REQUIRED, if the "redirect_uri" parameter was included in the
authorization request as described in Section 4.1.1, and their
values MUST be identical.
.. _`Section 4.1.1`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-4.1.1
"""
redirect_uri = redirect_uri or self.default_redirect_uri
code = code or self.code
return prepare_token_request(u'authorization_code', code=code, body=body,
redirect_uri=redirect_uri, **kwargs)
def parse_request_uri_response(self, uri, state=None):
"""Parse the URI query for code and state.
If the resource owner grants the access request, the authorization
server issues an authorization code and delivers it to the client by
adding the following parameters to the query component of the
redirection URI using the "application/x-www-form-urlencoded" format:
code
REQUIRED. The authorization code generated by the
authorization server. The authorization code MUST expire
shortly after it is issued to mitigate the risk of leaks. A
maximum authorization code lifetime of 10 minutes is
RECOMMENDED. The client MUST NOT use the authorization code
more than once. If an authorization code is used more than
once, the authorization server MUST deny the request and SHOULD
revoke (when possible) all tokens previously issued based on
that authorization code. The authorization code is bound to
the client identifier and redirection URI.
state
REQUIRED if the "state" parameter was present in the client
authorization request. The exact value received from the
client.
"""
response = parse_authorization_code_response(uri, state=state)
self._populate_attributes(response)
return response
def parse_request_body_response(self, body, scope=None):
"""Parse the JSON response body.
If the access token request is valid and authorized, the
authorization server issues an access token and optional refresh
        token as described in `Section 5.1`_. If the request failed client
        authentication or is invalid, the authorization server returns
an error response as described in `Section 5.2`_.
        .. _`Section 5.1`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-5.1
        .. _`Section 5.2`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-5.2
"""
response = parse_token_response(body, scope=scope)
self._populate_attributes(response)
return response
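# Illustrative sketch, not part of the original module: the full authorization
# code flow using the client defined above.  The endpoint URLs, client_id and
# codes are hypothetical, and it is assumed that the base Client constructor
# takes the client identifier as its first argument and that parsing the
# redirect stores the code on the client (prepare_request_body above falls
# back to self.code).
def _example_web_application_flow():
    client = WebApplicationClient(u'abc123')
    # Step 1: send the resource owner to the authorization endpoint.
    authorization_url = client.prepare_request_uri(
        u'https://server.example.com/authorize',
        redirect_uri=u'https://client.example.com/cb',
        scope=u'profile', state=u'xyz')
    # Step 2: the server redirects back with ?code=...&state=...
    client.parse_request_uri_response(
        u'https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA&state=xyz',
        state=u'xyz')
    # Step 3: exchange the code for an access token at the token endpoint.
    token_request_body = client.prepare_request_body(
        code=client.code, redirect_uri=u'https://client.example.com/cb')
    return authorization_url, token_request_body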
class UserAgentClient(Client):
"""A public client utilizing the implicit code grant workflow.
A user-agent-based application is a public client in which the
client code is downloaded from a web server and executes within a
user-agent (e.g. web browser) on the device used by the resource
owner. Protocol data and credentials are easily accessible (and
often visible) to the resource owner. Since such applications
reside within the user-agent, they can make seamless use of the
user-agent capabilities when requesting authorization.
The implicit grant type is used to obtain access tokens (it does not
support the issuance of refresh tokens) and is optimized for public
clients known to operate a particular redirection URI. These clients
are typically implemented in a browser using a scripting language
such as JavaScript.
As a redirection-based flow, the client must be capable of
interacting with the resource owner's user-agent (typically a web
browser) and capable of receiving incoming requests (via redirection)
from the authorization server.
Unlike the authorization code grant type in which the client makes
separate requests for authorization and access token, the client
receives the access token as the result of the authorization request.
The implicit grant type does not include client authentication, and
relies on the presence of the resource owner and the registration of
the redirection URI. Because the access token is encoded into the
redirection URI, it may be exposed to the resource owner and other
applications residing on the same device.
"""
def prepare_request_uri(self, uri, redirect_uri=None, scope=None,
state=None, **kwargs):
"""Prepare the implicit grant request URI.
The client constructs the request URI by adding the following
parameters to the query component of the authorization endpoint URI
using the "application/x-www-form-urlencoded" format:
response_type
REQUIRED. Value MUST be set to "token".
client_id
REQUIRED. The client identifier as described in Section 2.2.
redirect_uri
OPTIONAL. As described in Section 3.1.2.
scope
OPTIONAL. The scope of the access request as described by
Section 3.3.
state
RECOMMENDED. An opaque value used by the client to maintain
state between the request and callback. The authorization
server includes this value when redirecting the user-agent back
to the client. The parameter SHOULD be used for preventing
cross-site request forgery as described in Section 10.12.
"""
redirect_uri = redirect_uri or self.default_redirect_uri
return prepare_grant_uri(uri, self.client_id, u'token',
redirect_uri=redirect_uri, state=state, scope=scope, **kwargs)
def parse_request_uri_response(self, uri, state=None, scope=None):
"""Parse the response URI fragment.
If the resource owner grants the access request, the authorization
server issues an access token and delivers it to the client by adding
the following parameters to the fragment component of the redirection
URI using the "application/x-www-form-urlencoded" format:
access_token
REQUIRED. The access token issued by the authorization server.
token_type
REQUIRED. The type of the token issued as described in
`Section 7.1`_. Value is case insensitive.
expires_in
RECOMMENDED. The lifetime in seconds of the access token. For
example, the value "3600" denotes that the access token will
expire in one hour from the time the response was generated.
If omitted, the authorization server SHOULD provide the
expiration time via other means or document the default value.
scope
OPTIONAL, if identical to the scope requested by the client,
otherwise REQUIRED. The scope of the access token as described
by `Section 3.3`_.
state
REQUIRED if the "state" parameter was present in the client
authorization request. The exact value received from the
client.
.. _`Section 7.1`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-7.1
.. _`Section 3.3`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-3.3
"""
response = parse_implicit_response(uri, state=state, scope=scope)
self._populate_attributes(response)
return response
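# Illustrative sketch, not part of the original module: parsing an implicit
# grant redirect with the client defined above.  All values are hypothetical,
# and it is assumed the base Client constructor takes the client identifier
# as its first argument.
def _example_implicit_flow():
    client = UserAgentClient(u'abc123')
    client.prepare_request_uri(u'https://server.example.com/authorize',
                               redirect_uri=u'https://client.example.com/cb',
                               state=u'xyz')
    # The access token comes back in the fragment of the redirection URI.
    redirect = (u'https://client.example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA'
                u'&token_type=example&expires_in=3600&state=xyz')
    return client.parse_request_uri_response(redirect, state=u'xyz')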
class NativeApplicationClient(Client):
"""A public client utilizing the client credentials grant workflow.
A native application is a public client installed and executed on
the device used by the resource owner. Protocol data and
credentials are accessible to the resource owner. It is assumed
that any client authentication credentials included in the
application can be extracted. On the other hand, dynamically
issued credentials such as access tokens or refresh tokens can
receive an acceptable level of protection. At a minimum, these
credentials are protected from hostile servers with which the
    application may interact. On some platforms these
credentials might be protected from other applications residing on
the same device.
The client can request an access token using only its client
credentials (or other supported means of authentication) when the
client is requesting access to the protected resources under its
control, or those of another resource owner which has been previously
arranged with the authorization server (the method of which is beyond
the scope of this specification).
The client credentials grant type MUST only be used by confidential
clients.
Since the client authentication is used as the authorization grant,
no additional authorization request is needed.
"""
def prepare_request_body(self, body=u'', scope=None, **kwargs):
"""Add the client credentials to the request body.
The client makes a request to the token endpoint by adding the
following parameters using the "application/x-www-form-urlencoded"
format in the HTTP request entity-body:
grant_type
REQUIRED. Value MUST be set to "client_credentials".
scope
OPTIONAL. The scope of the access request as described by
`Section 3.3`_.
.. _`Section 3.3`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-3.3
"""
return prepare_token_request(u'client_credentials', body=body,
scope=scope, **kwargs)
def parse_request_body_response(self, body, scope=None):
"""Parse the JSON response body.
If the access token request is valid and authorized, the
authorization server issues an access token as described in
`Section 5.1`_. A refresh token SHOULD NOT be included. If the request
failed client authentication or is invalid, the authorization server
returns an error response as described in `Section 5.2`_.
        .. _`Section 5.1`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-5.1
        .. _`Section 5.2`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-5.2
"""
response = parse_token_response(body, scope=scope)
self._populate_attributes(response)
return response
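# Illustrative sketch, not part of the original module: a client credentials
# token request body produced by the client defined above.  The client_id and
# scope are hypothetical, and it is assumed the base Client constructor takes
# the client identifier as its first argument.
def _example_client_credentials_body():
    client = NativeApplicationClient(u'abc123')
    # Expected to resemble u'grant_type=client_credentials&scope=read+write'
    # (the exact encoding is handled by prepare_token_request).
    return client.prepare_request_body(scope=u'read write')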
class PasswordCredentialsClient(Client):
"""A public client using the resource owner password and username directly.
The resource owner password credentials grant type is suitable in
cases where the resource owner has a trust relationship with the
client, such as the device operating system or a highly privileged
application. The authorization server should take special care when
enabling this grant type, and only allow it when other flows are not
viable.
The grant type is suitable for clients capable of obtaining the
resource owner's credentials (username and password, typically using
an interactive form). It is also used to migrate existing clients
using direct authentication schemes such as HTTP Basic or Digest
authentication to OAuth by converting the stored credentials to an
access token.
The method through which the client obtains the resource owner
credentials is beyond the scope of this specification. The client
MUST discard the credentials once an access token has been obtained.
"""
def prepare_request_body(self, username, password, body=u'', scope=None,
**kwargs):
"""Add the resource owner password and username to the request body.
The client makes a request to the token endpoint by adding the
following parameters using the "application/x-www-form-urlencoded"
format in the HTTP request entity-body:
grant_type
REQUIRED. Value MUST be set to "password".
username
REQUIRED. The resource owner username.
password
REQUIRED. The resource owner password.
scope
OPTIONAL. The scope of the access request as described by
`Section 3.3`_.
.. _`Section 3.3`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-3.3
"""
return prepare_token_request(u'password', body=body, username=username,
password=password, scope=scope, **kwargs)
def parse_request_body_response(self, body, scope=None):
"""Parse the JSON response body.
If the access token request is valid and authorized, the
authorization server issues an access token and optional refresh
token as described in `Section 5.1`_. If the request failed client
authentication or is invalid, the authorization server returns an
error response as described in `Section 5.2`_.
        .. _`Section 5.1`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-5.1
        .. _`Section 5.2`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-5.2
"""
response = parse_token_response(body, scope=scope)
self._populate_attributes(response)
return response
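# Illustrative sketch, not part of the original module: a resource owner
# password credentials token request body produced by the client defined
# above.  The client_id, username and password are hypothetical, and it is
# assumed the base Client constructor takes the client identifier as its
# first argument.
def _example_password_credentials_body():
    client = PasswordCredentialsClient(u'abc123')
    # Expected to resemble
    # u'grant_type=password&username=alice&password=s3cret&scope=profile'.
    return client.prepare_request_body(u'alice', u's3cret', scope=u'profile')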
class Server(object):
pass
| gpl-3.0 | 4,745,918,991,383,437,000 | 45 | 122 | 0.662759 | false |
ajayuranakar/django-blog | lib/python2.7/site-packages/setuptools/dist.py | 65 | 32132 | __all__ = ['Distribution']
import re
import sys
from distutils.core import Distribution as _Distribution
from setuptools.depends import Require
from setuptools.command.install import install
from setuptools.command.sdist import sdist
from setuptools.command.install_lib import install_lib
from setuptools.compat import numeric_types, basestring
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
from distutils.errors import DistutilsSetupError
import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
import os, distutils.log
def _get_unpatched(cls):
"""Protect against re-patching the distutils if reloaded
Also ensures that no other distutils extension monkeypatched the distutils
first.
"""
while cls.__module__.startswith('setuptools'):
cls, = cls.__bases__
if not cls.__module__.startswith('distutils'):
raise AssertionError(
"distutils has already been patched by %r" % cls
)
return cls
_Distribution = _get_unpatched(_Distribution)
sequence = tuple, list
def check_importable(dist, attr, value):
try:
ep = pkg_resources.EntryPoint.parse('x='+value)
assert not ep.extras
except (TypeError,ValueError,AttributeError,AssertionError):
raise DistutilsSetupError(
"%r must be importable 'module:attrs' string (got %r)"
% (attr,value)
)
def assert_string_list(dist, attr, value):
"""Verify that value is a string list or None"""
try:
assert ''.join(value)!=value
except (TypeError,ValueError,AttributeError,AssertionError):
raise DistutilsSetupError(
"%r must be a list of strings (got %r)" % (attr,value)
)
def check_nsp(dist, attr, value):
"""Verify that namespace packages are valid"""
assert_string_list(dist,attr,value)
for nsp in value:
if not dist.has_contents_for(nsp):
raise DistutilsSetupError(
"Distribution contains no modules or packages for " +
"namespace package %r" % nsp
)
if '.' in nsp:
parent = '.'.join(nsp.split('.')[:-1])
if parent not in value:
distutils.log.warn(
"WARNING: %r is declared as a package namespace, but %r"
" is not: please correct this in setup.py", nsp, parent
)
def check_extras(dist, attr, value):
"""Verify that extras_require mapping is valid"""
try:
for k,v in value.items():
if ':' in k:
k,m = k.split(':',1)
if pkg_resources.invalid_marker(m):
raise DistutilsSetupError("Invalid environment marker: "+m)
list(pkg_resources.parse_requirements(v))
except (TypeError,ValueError,AttributeError):
raise DistutilsSetupError(
"'extras_require' must be a dictionary whose values are "
"strings or lists of strings containing valid project/version "
"requirement specifiers."
)
def assert_bool(dist, attr, value):
"""Verify that value is True, False, 0, or 1"""
if bool(value) != value:
raise DistutilsSetupError(
"%r must be a boolean value (got %r)" % (attr,value)
)
def check_requirements(dist, attr, value):
"""Verify that install_requires is a valid requirements list"""
try:
list(pkg_resources.parse_requirements(value))
except (TypeError,ValueError):
raise DistutilsSetupError(
"%r must be a string or list of strings "
"containing valid project/version requirement specifiers" % (attr,)
)
def check_entry_points(dist, attr, value):
"""Verify that entry_points map is parseable"""
try:
pkg_resources.EntryPoint.parse_map(value)
except ValueError:
e = sys.exc_info()[1]
raise DistutilsSetupError(e)
def check_test_suite(dist, attr, value):
if not isinstance(value,basestring):
raise DistutilsSetupError("test_suite must be a string")
def check_package_data(dist, attr, value):
"""Verify that value is a dictionary of package names to glob lists"""
if isinstance(value,dict):
for k,v in value.items():
if not isinstance(k,str): break
try: iter(v)
except TypeError:
break
else:
return
raise DistutilsSetupError(
attr+" must be a dictionary mapping package names to lists of "
"wildcard patterns"
)
def check_packages(dist, attr, value):
for pkgname in value:
if not re.match(r'\w+(\.\w+)*', pkgname):
distutils.log.warn(
"WARNING: %r not a valid package name; please use only"
".-separated package names in setup.py", pkgname
)
class Distribution(_Distribution):
"""Distribution with support for features, tests, and package data
This is an enhanced version of 'distutils.dist.Distribution' that
effectively adds the following new optional keyword arguments to 'setup()':
'install_requires' -- a string or sequence of strings specifying project
versions that the distribution requires when installed, in the format
used by 'pkg_resources.require()'. They will be installed
automatically when the package is installed. If you wish to use
packages that are not available in PyPI, or want to give your users an
alternate download location, you can add a 'find_links' option to the
'[easy_install]' section of your project's 'setup.cfg' file, and then
setuptools will scan the listed web pages for links that satisfy the
requirements.
'extras_require' -- a dictionary mapping names of optional "extras" to the
additional requirement(s) that using those extras incurs. For example,
this::
extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
indicates that the distribution can optionally provide an extra
capability called "reST", but it can only be used if docutils and
reSTedit are installed. If the user installs your package using
EasyInstall and requests one of your extras, the corresponding
additional requirements will be installed if needed.
'features' -- a dictionary mapping option names to 'setuptools.Feature'
objects. Features are a portion of the distribution that can be
included or excluded based on user options, inter-feature dependencies,
and availability on the current system. Excluded features are omitted
from all setup commands, including source and binary distributions, so
you can create multiple distributions from the same source tree.
Feature names should be valid Python identifiers, except that they may
contain the '-' (minus) sign. Features can be included or excluded
via the command line options '--with-X' and '--without-X', where 'X' is
the name of the feature. Whether a feature is included by default, and
whether you are allowed to control this from the command line, is
determined by the Feature object. See the 'Feature' class for more
information.
'test_suite' -- the name of a test suite to run for the 'test' command.
If the user runs 'python setup.py test', the package will be installed,
and the named test suite will be run. The format is the same as
would be used on a 'unittest.py' command line. That is, it is the
dotted name of an object to import and call to generate a test suite.
'package_data' -- a dictionary mapping package names to lists of filenames
or globs to use to find data files contained in the named packages.
If the dictionary has filenames or globs listed under '""' (the empty
string), those names will be searched for in every package, in addition
to any names for the specific package. Data files found using these
names/globs will be installed along with the package, in the same
location as the package. Note that globs are allowed to reference
the contents of non-package subdirectories, as long as you use '/' as
a path separator. (Globs are automatically converted to
platform-specific paths at runtime.)
In addition to these new keywords, this class also has several new methods
for manipulating the distribution's contents. For example, the 'include()'
and 'exclude()' methods can be thought of as in-place add and subtract
commands that add or remove packages, modules, extensions, and so on from
the distribution. They are used by the feature subsystem to configure the
distribution for the included and excluded features.
"""
_patched_dist = None
def patch_missing_pkg_info(self, attrs):
# Fake up a replacement for the data that would normally come from
# PKG-INFO, but which might not yet be built if this is a fresh
# checkout.
#
if not attrs or 'name' not in attrs or 'version' not in attrs:
return
key = pkg_resources.safe_name(str(attrs['name'])).lower()
dist = pkg_resources.working_set.by_key.get(key)
if dist is not None and not dist.has_metadata('PKG-INFO'):
dist._version = pkg_resources.safe_version(str(attrs['version']))
self._patched_dist = dist
def __init__ (self, attrs=None):
have_package_data = hasattr(self, "package_data")
if not have_package_data:
self.package_data = {}
self.require_features = []
self.features = {}
self.dist_files = []
self.src_root = attrs and attrs.pop("src_root", None)
self.patch_missing_pkg_info(attrs)
# Make sure we have any eggs needed to interpret 'attrs'
if attrs is not None:
self.dependency_links = attrs.pop('dependency_links', [])
assert_string_list(self,'dependency_links',self.dependency_links)
if attrs and 'setup_requires' in attrs:
self.fetch_build_eggs(attrs.pop('setup_requires'))
for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
if not hasattr(self,ep.name):
setattr(self,ep.name,None)
_Distribution.__init__(self,attrs)
if isinstance(self.metadata.version, numeric_types):
# Some people apparently take "version number" too literally :)
self.metadata.version = str(self.metadata.version)
def parse_command_line(self):
"""Process features after parsing command line options"""
result = _Distribution.parse_command_line(self)
if self.features:
self._finalize_features()
return result
def _feature_attrname(self,name):
"""Convert feature name to corresponding option attribute name"""
return 'with_'+name.replace('-','_')
def fetch_build_eggs(self, requires):
"""Resolve pre-setup requirements"""
from pkg_resources import working_set, parse_requirements
for dist in working_set.resolve(
parse_requirements(requires), installer=self.fetch_build_egg
):
working_set.add(dist)
def finalize_options(self):
_Distribution.finalize_options(self)
if self.features:
self._set_global_opts_from_features()
for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
value = getattr(self,ep.name,None)
if value is not None:
ep.require(installer=self.fetch_build_egg)
ep.load()(self, ep.name, value)
if getattr(self, 'convert_2to3_doctests', None):
# XXX may convert to set here when we can rely on set being builtin
self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
else:
self.convert_2to3_doctests = []
def fetch_build_egg(self, req):
"""Fetch an egg needed for building"""
try:
cmd = self._egg_fetcher
cmd.package_index.to_scan = []
except AttributeError:
from setuptools.command.easy_install import easy_install
dist = self.__class__({'script_args':['easy_install']})
dist.parse_config_files()
opts = dist.get_option_dict('easy_install')
keep = (
'find_links', 'site_dirs', 'index_url', 'optimize',
'site_dirs', 'allow_hosts'
)
for key in opts.keys():
if key not in keep:
del opts[key] # don't use any other settings
if self.dependency_links:
links = self.dependency_links[:]
if 'find_links' in opts:
links = opts['find_links'][1].split() + links
opts['find_links'] = ('setup', links)
cmd = easy_install(
dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
always_copy=False, build_directory=None, editable=False,
upgrade=False, multi_version=True, no_report=True, user=False
)
cmd.ensure_finalized()
self._egg_fetcher = cmd
return cmd.easy_install(req)
def _set_global_opts_from_features(self):
"""Add --with-X/--without-X options based on optional features"""
go = []
no = self.negative_opt.copy()
for name,feature in self.features.items():
self._set_feature(name,None)
feature.validate(self)
if feature.optional:
descr = feature.description
incdef = ' (default)'
excdef=''
if not feature.include_by_default():
excdef, incdef = incdef, excdef
go.append(('with-'+name, None, 'include '+descr+incdef))
go.append(('without-'+name, None, 'exclude '+descr+excdef))
no['without-'+name] = 'with-'+name
self.global_options = self.feature_options = go + self.global_options
self.negative_opt = self.feature_negopt = no
def _finalize_features(self):
"""Add/remove features and resolve dependencies between them"""
# First, flag all the enabled items (and thus their dependencies)
for name,feature in self.features.items():
enabled = self.feature_is_included(name)
if enabled or (enabled is None and feature.include_by_default()):
feature.include_in(self)
self._set_feature(name,1)
# Then disable the rest, so that off-by-default features don't
# get flagged as errors when they're required by an enabled feature
for name,feature in self.features.items():
if not self.feature_is_included(name):
feature.exclude_from(self)
self._set_feature(name,0)
def get_command_class(self, command):
"""Pluggable version of get_command_class()"""
if command in self.cmdclass:
return self.cmdclass[command]
for ep in pkg_resources.iter_entry_points('distutils.commands',command):
ep.require(installer=self.fetch_build_egg)
self.cmdclass[command] = cmdclass = ep.load()
return cmdclass
else:
return _Distribution.get_command_class(self, command)
def print_commands(self):
for ep in pkg_resources.iter_entry_points('distutils.commands'):
if ep.name not in self.cmdclass:
cmdclass = ep.load(False) # don't require extras, we're not running
self.cmdclass[ep.name] = cmdclass
return _Distribution.print_commands(self)
def _set_feature(self,name,status):
"""Set feature's inclusion status"""
setattr(self,self._feature_attrname(name),status)
def feature_is_included(self,name):
"""Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
return getattr(self,self._feature_attrname(name))
def include_feature(self,name):
"""Request inclusion of feature named 'name'"""
if self.feature_is_included(name)==0:
descr = self.features[name].description
raise DistutilsOptionError(
descr + " is required, but was excluded or is not available"
)
self.features[name].include_in(self)
self._set_feature(name,1)
def include(self,**attrs):
"""Add items to distribution that are named in keyword arguments
        For example, 'dist.include(py_modules=["x"])' would add 'x' to
the distribution's 'py_modules' attribute, if it was not already
there.
Currently, this method only supports inclusion for attributes that are
lists or tuples. If you need to add support for adding to other
attributes in this or a subclass, you can add an '_include_X' method,
where 'X' is the name of the attribute. The method will be called with
the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
will try to call 'dist._include_foo({"bar":"baz"})', which can then
handle whatever special inclusion logic is needed.
"""
for k,v in attrs.items():
include = getattr(self, '_include_'+k, None)
if include:
include(v)
else:
self._include_misc(k,v)
def exclude_package(self,package):
"""Remove packages, modules, and extensions in named package"""
pfx = package+'.'
if self.packages:
self.packages = [
p for p in self.packages
if p != package and not p.startswith(pfx)
]
if self.py_modules:
self.py_modules = [
p for p in self.py_modules
if p != package and not p.startswith(pfx)
]
if self.ext_modules:
self.ext_modules = [
p for p in self.ext_modules
if p.name != package and not p.name.startswith(pfx)
]
def has_contents_for(self,package):
"""Return true if 'exclude_package(package)' would do something"""
pfx = package+'.'
for p in self.iter_distribution_names():
if p==package or p.startswith(pfx):
return True
def _exclude_misc(self,name,value):
"""Handle 'exclude()' for list/tuple attrs without a special handler"""
if not isinstance(value,sequence):
raise DistutilsSetupError(
"%s: setting must be a list or tuple (%r)" % (name, value)
)
try:
old = getattr(self,name)
except AttributeError:
raise DistutilsSetupError(
"%s: No such distribution setting" % name
)
if old is not None and not isinstance(old,sequence):
raise DistutilsSetupError(
name+": this setting cannot be changed via include/exclude"
)
elif old:
setattr(self,name,[item for item in old if item not in value])
def _include_misc(self,name,value):
"""Handle 'include()' for list/tuple attrs without a special handler"""
if not isinstance(value,sequence):
raise DistutilsSetupError(
"%s: setting must be a list (%r)" % (name, value)
)
try:
old = getattr(self,name)
except AttributeError:
raise DistutilsSetupError(
"%s: No such distribution setting" % name
)
if old is None:
setattr(self,name,value)
elif not isinstance(old,sequence):
raise DistutilsSetupError(
name+": this setting cannot be changed via include/exclude"
)
else:
setattr(self,name,old+[item for item in value if item not in old])
def exclude(self,**attrs):
"""Remove items from distribution that are named in keyword arguments
For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
the distribution's 'py_modules' attribute. Excluding packages uses
the 'exclude_package()' method, so all of the package's contained
packages, modules, and extensions are also excluded.
Currently, this method only supports exclusion from attributes that are
lists or tuples. If you need to add support for excluding from other
attributes in this or a subclass, you can add an '_exclude_X' method,
where 'X' is the name of the attribute. The method will be called with
the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
handle whatever special exclusion logic is needed.
"""
for k,v in attrs.items():
exclude = getattr(self, '_exclude_'+k, None)
if exclude:
exclude(v)
else:
self._exclude_misc(k,v)
def _exclude_packages(self,packages):
if not isinstance(packages,sequence):
raise DistutilsSetupError(
"packages: setting must be a list or tuple (%r)" % (packages,)
)
list(map(self.exclude_package, packages))
def _parse_command_opts(self, parser, args):
# Remove --with-X/--without-X options when processing command args
self.global_options = self.__class__.global_options
self.negative_opt = self.__class__.negative_opt
# First, expand any aliases
command = args[0]
aliases = self.get_option_dict('aliases')
while command in aliases:
src,alias = aliases[command]
del aliases[command] # ensure each alias can expand only once!
import shlex
args[:1] = shlex.split(alias,True)
command = args[0]
nargs = _Distribution._parse_command_opts(self, parser, args)
# Handle commands that want to consume all remaining arguments
cmd_class = self.get_command_class(command)
if getattr(cmd_class,'command_consumes_arguments',None):
self.get_option_dict(command)['args'] = ("command line", nargs)
if nargs is not None:
return []
return nargs
def get_cmdline_options(self):
"""Return a '{cmd: {opt:val}}' map of all command-line options
Option names are all long, but do not include the leading '--', and
contain dashes rather than underscores. If the option doesn't take
an argument (e.g. '--quiet'), the 'val' is 'None'.
Note that options provided by config files are intentionally excluded.
"""
d = {}
for cmd,opts in self.command_options.items():
for opt,(src,val) in opts.items():
if src != "command line":
continue
opt = opt.replace('_','-')
if val==0:
cmdobj = self.get_command_obj(cmd)
neg_opt = self.negative_opt.copy()
neg_opt.update(getattr(cmdobj,'negative_opt',{}))
for neg,pos in neg_opt.items():
if pos==opt:
opt=neg
val=None
break
else:
raise AssertionError("Shouldn't be able to get here")
elif val==1:
val = None
d.setdefault(cmd,{})[opt] = val
return d
def iter_distribution_names(self):
"""Yield all packages, modules, and extension names in distribution"""
for pkg in self.packages or ():
yield pkg
for module in self.py_modules or ():
yield module
for ext in self.ext_modules or ():
if isinstance(ext,tuple):
name, buildinfo = ext
else:
name = ext.name
if name.endswith('module'):
name = name[:-6]
yield name
def handle_display_options(self, option_order):
"""If there were any non-global "display-only" options
(--help-commands or the metadata display options) on the command
line, display the requested info and return true; else return
false.
"""
import sys
if sys.version_info < (3,) or self.help_commands:
return _Distribution.handle_display_options(self, option_order)
# Stdout may be StringIO (e.g. in tests)
import io
if not isinstance(sys.stdout, io.TextIOWrapper):
return _Distribution.handle_display_options(self, option_order)
# Don't wrap stdout if utf-8 is already the encoding. Provides
# workaround for #334.
if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
return _Distribution.handle_display_options(self, option_order)
# Print metadata in UTF-8 no matter the platform
encoding = sys.stdout.encoding
errors = sys.stdout.errors
newline = sys.platform != 'win32' and '\n' or None
line_buffering = sys.stdout.line_buffering
sys.stdout = io.TextIOWrapper(
sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
try:
return _Distribution.handle_display_options(self, option_order)
finally:
sys.stdout = io.TextIOWrapper(
sys.stdout.detach(), encoding, errors, newline, line_buffering)
# Install it throughout the distutils
for module in distutils.dist, distutils.core, distutils.cmd:
module.Distribution = Distribution
class Feature:
"""A subset of the distribution that can be excluded if unneeded/wanted
Features are created using these keyword arguments:
'description' -- a short, human readable description of the feature, to
be used in error messages, and option help messages.
'standard' -- if true, the feature is included by default if it is
available on the current system. Otherwise, the feature is only
included if requested via a command line '--with-X' option, or if
another included feature requires it. The default setting is 'False'.
'available' -- if true, the feature is available for installation on the
current system. The default setting is 'True'.
'optional' -- if true, the feature's inclusion can be controlled from the
command line, using the '--with-X' or '--without-X' options. If
false, the feature's inclusion status is determined automatically,
        based on 'available', 'standard', and whether any other feature
requires it. The default setting is 'True'.
'require_features' -- a string or sequence of strings naming features
that should also be included if this feature is included. Defaults to
empty list. May also contain 'Require' objects that should be
added/removed from the distribution.
'remove' -- a string or list of strings naming packages to be removed
from the distribution if this feature is *not* included. If the
feature *is* included, this argument is ignored. This argument exists
to support removing features that "crosscut" a distribution, such as
defining a 'tests' feature that removes all the 'tests' subpackages
provided by other features. The default for this argument is an empty
list. (Note: the named package(s) or modules must exist in the base
distribution when the 'setup()' function is initially called.)
other keywords -- any other keyword arguments are saved, and passed to
the distribution's 'include()' and 'exclude()' methods when the
feature is included or excluded, respectively. So, for example, you
could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
added or removed from the distribution as appropriate.
A feature must include at least one 'requires', 'remove', or other
keyword argument. Otherwise, it can't affect the distribution in any way.
Note also that you can subclass 'Feature' to create your own specialized
feature types that modify the distribution in other ways when included or
excluded. See the docstrings for the various methods here for more detail.
Aside from the methods, the only feature attributes that distributions look
at are 'description' and 'optional'.
"""
def __init__(self, description, standard=False, available=True,
optional=True, require_features=(), remove=(), **extras
):
self.description = description
self.standard = standard
self.available = available
self.optional = optional
if isinstance(require_features,(str,Require)):
require_features = require_features,
self.require_features = [
r for r in require_features if isinstance(r,str)
]
er = [r for r in require_features if not isinstance(r,str)]
if er: extras['require_features'] = er
if isinstance(remove,str):
remove = remove,
self.remove = remove
self.extras = extras
if not remove and not require_features and not extras:
raise DistutilsSetupError(
"Feature %s: must define 'require_features', 'remove', or at least one"
" of 'packages', 'py_modules', etc."
)
def include_by_default(self):
"""Should this feature be included by default?"""
return self.available and self.standard
def include_in(self,dist):
"""Ensure feature and its requirements are included in distribution
You may override this in a subclass to perform additional operations on
the distribution. Note that this method may be called more than once
per feature, and so should be idempotent.
"""
if not self.available:
raise DistutilsPlatformError(
self.description+" is required,"
"but is not available on this platform"
)
dist.include(**self.extras)
for f in self.require_features:
dist.include_feature(f)
def exclude_from(self,dist):
"""Ensure feature is excluded from distribution
You may override this in a subclass to perform additional operations on
the distribution. This method will be called at most once per
feature, and only after all included features have been asked to
include themselves.
"""
dist.exclude(**self.extras)
if self.remove:
for item in self.remove:
dist.exclude_package(item)
def validate(self,dist):
"""Verify that feature makes sense in context of distribution
This method is called by the distribution just before it parses its
command line. It checks to ensure that the 'remove' attribute, if any,
contains only valid package/module names that are present in the base
distribution when 'setup()' is called. You may override it in a
subclass to perform any other required validation of the feature
against a target distribution.
"""
for item in self.remove:
if not dist.has_contents_for(item):
raise DistutilsSetupError(
"%s wants to be able to remove %s, but the distribution"
" doesn't contain any packages or modules under %s"
% (self.description, item, item)
)
| gpl-3.0 | 6,818,622,924,915,607,000 | 35.022422 | 97 | 0.611944 | false |
arnif/CouchPotatoServer | libs/requests/packages/oauthlib/oauth1/rfc5849/parameters.py | 186 | 4817 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
"""
oauthlib.parameters
~~~~~~~~~~~~~~~~~~~
This module contains methods related to `section 3.5`_ of the OAuth 1.0a spec.
.. _`section 3.5`: http://tools.ietf.org/html/rfc5849#section-3.5
"""
from urlparse import urlparse, urlunparse
from . import utils
from oauthlib.common import extract_params, urlencode
# TODO: do we need filter_params now that oauth_params are handled by Request?
# We can easily pass in just oauth protocol params.
@utils.filter_params
def prepare_headers(oauth_params, headers=None, realm=None):
"""**Prepare the Authorization header.**
Per `section 3.5.1`_ of the spec.
Protocol parameters can be transmitted using the HTTP "Authorization"
header field as defined by `RFC2617`_ with the auth-scheme name set to
"OAuth" (case insensitive).
For example::
Authorization: OAuth realm="Example",
oauth_consumer_key="0685bd9184jfhq22",
oauth_token="ad180jjd733klru7",
oauth_signature_method="HMAC-SHA1",
oauth_signature="wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D",
oauth_timestamp="137131200",
oauth_nonce="4572616e48616d6d65724c61686176",
oauth_version="1.0"
.. _`section 3.5.1`: http://tools.ietf.org/html/rfc5849#section-3.5.1
.. _`RFC2617`: http://tools.ietf.org/html/rfc2617
"""
headers = headers or {}
# Protocol parameters SHALL be included in the "Authorization" header
# field as follows:
authorization_header_parameters_parts = []
for oauth_parameter_name, value in oauth_params:
# 1. Parameter names and values are encoded per Parameter Encoding
# (`Section 3.6`_)
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
escaped_name = utils.escape(oauth_parameter_name)
escaped_value = utils.escape(value)
# 2. Each parameter's name is immediately followed by an "=" character
# (ASCII code 61), a """ character (ASCII code 34), the parameter
# value (MAY be empty), and another """ character (ASCII code 34).
part = u'{0}="{1}"'.format(escaped_name, escaped_value)
authorization_header_parameters_parts.append(part)
# 3. Parameters are separated by a "," character (ASCII code 44) and
# OPTIONAL linear whitespace per `RFC2617`_.
#
# .. _`RFC2617`: http://tools.ietf.org/html/rfc2617
authorization_header_parameters = ', '.join(
authorization_header_parameters_parts)
# 4. The OPTIONAL "realm" parameter MAY be added and interpreted per
# `RFC2617 section 1.2`_.
#
# .. _`RFC2617 section 1.2`: http://tools.ietf.org/html/rfc2617#section-1.2
if realm:
# NOTE: realm should *not* be escaped
authorization_header_parameters = (u'realm="%s", ' % realm +
authorization_header_parameters)
# the auth-scheme name set to "OAuth" (case insensitive).
authorization_header = u'OAuth %s' % authorization_header_parameters
# contribute the Authorization header to the given headers
full_headers = {}
full_headers.update(headers)
full_headers[u'Authorization'] = authorization_header
return full_headers
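# Illustrative sketch, not part of the original module: building an
# Authorization header from already-signed protocol parameters.  The values
# are the ones shown in the docstring above; signing itself happens elsewhere.
def _example_prepare_headers():
    oauth_params = [
        (u'oauth_consumer_key', u'0685bd9184jfhq22'),
        (u'oauth_token', u'ad180jjd733klru7'),
        (u'oauth_signature_method', u'HMAC-SHA1'),
        (u'oauth_signature', u'wOJIO9A2W5mFwDgiDvZbTSMK/PY='),
        (u'oauth_timestamp', u'137131200'),
        (u'oauth_nonce', u'4572616e48616d6d65724c61686176'),
        (u'oauth_version', u'1.0'),
    ]
    return prepare_headers(oauth_params, realm=u'Example')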
def _append_params(oauth_params, params):
"""Append OAuth params to an existing set of parameters.
Both params and oauth_params is must be lists of 2-tuples.
Per `section 3.5.2`_ and `3.5.3`_ of the spec.
.. _`section 3.5.2`: http://tools.ietf.org/html/rfc5849#section-3.5.2
.. _`3.5.3`: http://tools.ietf.org/html/rfc5849#section-3.5.3
"""
merged = list(params)
merged.extend(oauth_params)
# The request URI / entity-body MAY include other request-specific
# parameters, in which case, the protocol parameters SHOULD be appended
# following the request-specific parameters, properly separated by an "&"
# character (ASCII code 38)
merged.sort(key=lambda i: i[0].startswith('oauth_'))
return merged
def prepare_form_encoded_body(oauth_params, body):
"""Prepare the Form-Encoded Body.
Per `section 3.5.2`_ of the spec.
.. _`section 3.5.2`: http://tools.ietf.org/html/rfc5849#section-3.5.2
"""
# append OAuth params to the existing body
return _append_params(oauth_params, body)
def prepare_request_uri_query(oauth_params, uri):
"""Prepare the Request URI Query.
Per `section 3.5.3`_ of the spec.
.. _`section 3.5.3`: http://tools.ietf.org/html/rfc5849#section-3.5.3
"""
# append OAuth params to the existing set of query components
sch, net, path, par, query, fra = urlparse(uri)
query = urlencode(_append_params(oauth_params, extract_params(query) or []))
return urlunparse((sch, net, path, par, query, fra))
| gpl-3.0 | -2,138,787,602,892,411,100 | 34.947761 | 80 | 0.658501 | false |
annahs/atmos_research | util_move_ptxt_data_to_mySQL_table.py | 1 | 6036 | import sys
import os
import numpy as np
from pprint import pprint
from datetime import datetime
import mysql.connector
import pickle
import math
instrument = 'UBCSP2' #'UBCSP2' #ECSP2
instrument_locn = 'WHI' #'WHI' POLAR6
#this script will run through all the .ptxt files for the WHI data and put the info into the mySQL database
#1. #alter the dates to set limits on data analysis range
start_analysis_at = datetime.strptime('20080601','%Y%m%d')
end_analysis_at = datetime.strptime('20120411','%Y%m%d')
#2. set the data_dir as the parent folder for the data, the program expects the data dir to contain subfolders named by day (eg 20120101) that contain the actual data
#note that UTC timestamping was used in the ptxt file for 2009 and for <20120412 otherwise timestamps are in PST
data_dir = 'D:/2012/WHI_UBCSP2/Binary/' #'D:/2009/WHI_ECSP2/Binary/'#'D:/2012/WHI_UBCSP2/Binary/' # 'D:/2010/WHI_ECSP2/Binary/' #'D:/2012/WHI_UBCSP2/Binary/'
os.chdir(data_dir)
parameters = {
	#old calib - need this to retrieve incand peak heights which I did not originally write to the ptxt files
#the first three variables are the terms of the calibration fit line which gives mass in fg/count (eg. for the UBC_SP2 in 2012 use 0.20699+0.00246x-1.09254e-7x2) 2010: 0.156 + 0.00606x + 6.3931e-7x2
'BC_calib1': 0.20699 , #0 order term
'BC_calib2': 0.00246, #1st order term
'BC_calib3': - 1.09254e-7, #2nd order term
'BC_density': 1.8, #density of ambient BC in g/cc
	#new - AD corrected calib with uncertainty, 2009:(0.0172(+-0.00037)x + 0.01244(+-0.14983)) 2010:(0.01081(+-0.0002199)x - 0.32619(+-0.16465))
'BC_calib1_new': 0.24826, #0 order term
'BC_calib1_new_err': 0.05668, #0 order term +- uncertainty
'BC_calib2_new': 0.003043, #1st order term
'BC_calib2_new_err': 0.0002323, #1st order term +- uncertainty
}
#sample factor list - 2012 only
sample_factor_list_file = open('D:/2012/WHI_UBCSP2/Binary/sample_rate_changes.srpckl', 'r')
sample_factor_list = pickle.load(sample_factor_list_file)
sample_factor_list_file.close()
#database connection
cnx = mysql.connector.connect(user='root', password='Suresh15', host='localhost', database='black_carbon')
cursor = cnx.cursor()
#create insert statement variable for database updating
add_particle = ('INSERT IGNORE INTO whi_sp2_single_incand_particle_data'
'(location, instr, particle_collection_start_time,particle_collection_end_time,sample_flow,incand_pk_height,incand_sat_flag,calc_BC_mass,calc_BC_mass_LL,calc_BC_mass_UL)'
'VALUES (%(instrument_locn)s,%(instrument)s,%(particle_collection_start_time)s,%(particle_collection_end_time)s,%(sample_flow)s,%(incand_pk_height)s,%(incand_sat_flag)s,%(calc_BC_mass)s,%(calc_BC_mass_LL)s,%(calc_BC_mass_UL)s)')
#this helper method allows conversion of BC mass from a value arrived at via an old calibration to a value arrived at via a new calibration
#quadratic equation: a*x**2 + b*x + c = 0, where c = C - BC_mass
def PeakHtFromMass(BC_mass,var_C,var_b,var_a):
C = var_C
b = var_b
a = var_a
c = C - BC_mass
d = b**2-4*a*c
if d < 0:
		#This equation has no real solution
return np.nan
elif d == 0:
# This equation has one solution
x = (-b+math.sqrt(b**2-4*a*c))/(2*a)
return x
else:
#This equation has two solutions
x1 = (-b+math.sqrt((b**2)-(4*(a*c))))/(2*a)
x2 = (-b-math.sqrt((b**2)-(4*(a*c))))/(2*a)
if x1 <4000:
return x1
if x2 <4000:
return x2
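#Worked example (added for clarity): inverting mass = 0.20699 + 0.00246*x - 1.09254e-7*x**2
#for a BC mass of 1.0 fg gives c = 0.20699 - 1.0 = -0.79301,
#d = 0.00246**2 - 4*(-1.09254e-7)*(-0.79301) ~ 5.71e-6, and the root
#x1 = (-0.00246 + sqrt(d))/(2*(-1.09254e-7)) ~ 327 counts; since this is below the
#4000 count cut-off it is returned as the incandescence peak height.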
for directory in os.listdir(data_dir):
if os.path.isdir(directory) == True and directory.startswith('20'):
folder_date = datetime.strptime(directory, '%Y%m%d')
if start_analysis_at <= folder_date <= end_analysis_at:
current_dir = os.path.abspath(directory)
os.chdir(current_dir)
for file in os.listdir('.'):
if file.endswith('.ptxt'):
print file
f = open(file, 'r+')
f.readline()
for line in f:
newline = line.split('\t')
start_time = float(newline[0])# + 28800 #+28800 converts to UTC from PST
end_time = float(newline[1])# + 28800 #+28800 converts to UTC from PST
incand_flag = float(newline[2])
incand_sat_flag = int(newline[3])
BC_mass = float(newline[4])
if parameters['BC_calib3'] == 0:
pk_ht = (BC_mass - parameters['BC_calib1'])/parameters['BC_calib2']
else:
pk_ht = PeakHtFromMass(BC_mass, parameters['BC_calib1'], parameters['BC_calib2'], parameters['BC_calib3'])
BC_mass = parameters['BC_calib2_new']*pk_ht + parameters['BC_calib1_new']
BC_mass_LL = (parameters['BC_calib2_new']-parameters['BC_calib2_new_err'])*pk_ht + (parameters['BC_calib1_new'] - parameters['BC_calib1_new_err'])
BC_mass_UL = (parameters['BC_calib2_new']+parameters['BC_calib2_new_err'])*pk_ht + (parameters['BC_calib1_new'] + parameters['BC_calib1_new_err'])
##set correct sample factor - all of 2009 and 2010 were run at 1/1 -
##note these are in UTC
#if sample_factor_list:
						# first_sample_factor_time = calendar.timegm(sample_factor_list[0][0].utctimetuple()) #get UNIX timestamp for first sample factor time in UTC
# #first_sample_factor_time = first_sample_factor_time - 8*3600# use if ptxt files still not UTC, ie post 20120411
#
# if start_time >= first_sample_factor_time:
# sample_factor = float(sample_factor_list[0][1])
# sample_factor_list.pop(0)
particle_data = {
'instrument': instrument,
'instrument_locn':instrument_locn,
'particle_collection_start_time': start_time,
'particle_collection_end_time': end_time,
'sample_flow': 120.,
'incand_pk_height': pk_ht,
'incand_sat_flag': incand_sat_flag,
'calc_BC_mass': BC_mass,
'calc_BC_mass_LL': BC_mass_LL,
'calc_BC_mass_UL': BC_mass_UL,
}
cursor.execute(add_particle, particle_data)
cnx.commit()
os.chdir(data_dir)
cnx.close()
| mit | -7,577,130,624,833,383,000 | 37.44586 | 242 | 0.652253 | false |
marcoitur/FreeCAD | src/Mod/Path/PathScripts/dumper_post.py | 20 | 3812 | #***************************************************************************
#* (c) sliptonic ([email protected]) 2014 *
#* *
#* This file is part of the FreeCAD CAx development system. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* FreeCAD is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Lesser General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with FreeCAD; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************/
'''
Dumper is an extremely simple postprocessor file for the Path workbench. It is used
to dump the command list from one or more Path objects for simple inspection. This post
doesn't do any manipulation of the path and doesn't write anything to disk. It just
shows the dialog so you can see it. Useful for debugging, but not much else.
'''
import datetime
now = datetime.datetime.now()
from PathScripts import PostUtils
SHOW_EDITOR = True
# to distinguish python built-in open function from the one declared below
if open.__module__ == '__builtin__':
pythonopen = open
def export(objectslist,filename):
    output = '''(This output was produced with the dump post processor)
(Dump is useful for inspecting the raw commands in your paths)
(but is not useful for driving machines.)
(Consider setting a default postprocessor in your project or )
(exporting your paths using a specific post that matches your machine)
'''
"called when freecad exports a list of objects"
for obj in objectslist:
if not hasattr(obj,"Path"):
print "the object " + obj.Name + " is not a path. Please select only path and Compounds."
return
print "postprocessing..."
output += parse(obj)
if SHOW_EDITOR:
dia = PostUtils.GCodeEditorDialog()
dia.editor.setText(output)
result = dia.exec_()
if result:
final = dia.editor.toPlainText()
else:
final = output
else:
final = output
print "done postprocessing."
def parse(pathobj):
out = ""
if hasattr(pathobj,"Group"): #We have a compound or project.
out += "(compound: " + pathobj.Label + ")\n"
for p in pathobj.Group:
out += parse(p)
return out
else: #parsing simple path
if not hasattr(pathobj,"Path"): #groups might contain non-path things like stock.
return out
out += "(Path: " + pathobj.Label + ")\n"
for c in pathobj.Path.Commands:
out += str(c) + "\n"
return out
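# Illustrative sketch (not part of the original file): from the FreeCAD python
# console, with a Path object in the active document, the post processor would
# typically be driven like this (the object name is hypothetical, and the
# filename argument is unused by this post):
#
#   import dumper_post
#   dumper_post.export([FreeCAD.ActiveDocument.getObject("Path_Profile")], "unused.tmp")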
print __name__ + " gcode postprocessor loaded."
| lgpl-2.1 | 7,619,847,830,826,860,000 | 39.989247 | 101 | 0.530168 | false |
cognitiveclass/edx-platform | openedx/core/djangoapps/profile_images/tests/helpers.py | 117 | 1742 | """
Helper methods for use in profile image tests.
"""
from contextlib import contextmanager
import os
from tempfile import NamedTemporaryFile
from django.core.files.uploadedfile import UploadedFile
from PIL import Image
@contextmanager
def make_image_file(dimensions=(320, 240), extension=".jpeg", force_size=None):
"""
Yields a named temporary file created with the specified image type and
options.
Note the default dimensions are unequal (not a square) ensuring that center-square
cropping logic will be exercised during tests.
The temporary file will be closed and deleted automatically upon exiting
the `with` block.
"""
image = Image.new('RGB', dimensions, "green")
image_file = NamedTemporaryFile(suffix=extension)
try:
image.save(image_file)
if force_size is not None:
image_file.seek(0, os.SEEK_END)
bytes_to_pad = force_size - image_file.tell()
# write in hunks of 256 bytes
hunk, byte_ = bytearray([0] * 256), bytearray([0])
num_hunks, remainder = divmod(bytes_to_pad, 256)
for _ in xrange(num_hunks):
image_file.write(hunk)
for _ in xrange(remainder):
image_file.write(byte_)
image_file.flush()
image_file.seek(0)
yield image_file
finally:
image_file.close()
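# Illustrative sketch, not part of the original module: exercising the helper
# above to get a small throwaway image for a test.
def _example_make_image_file_usage():
    with make_image_file(dimensions=(50, 50)) as image_file:
        image = Image.open(image_file)
        return image.size  # (50, 50)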
@contextmanager
def make_uploaded_file(content_type, *a, **kw):
"""
Wrap the result of make_image_file in a django UploadedFile.
"""
with make_image_file(*a, **kw) as image_file:
yield UploadedFile(
image_file,
content_type=content_type,
size=os.path.getsize(image_file.name),
)
| agpl-3.0 | -5,679,710,942,558,775,000 | 30.672727 | 86 | 0.633754 | false |
Celthi/youtube-dl-GUI | youtube_dl/extractor/tube8.py | 32 | 3522 | from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_urlparse,
compat_urllib_request,
)
from ..utils import (
int_or_none,
str_to_int,
)
from ..aes import aes_decrypt_text
class Tube8IE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?tube8\.com/(?:[^/]+/)+(?P<display_id>[^/]+)/(?P<id>\d+)'
_TESTS = [
{
'url': 'http://www.tube8.com/teen/kasia-music-video/229795/',
'md5': '44bf12b98313827dd52d35b8706a4ea0',
'info_dict': {
'id': '229795',
'display_id': 'kasia-music-video',
'ext': 'mp4',
'description': 'hot teen Kasia grinding',
'uploader': 'unknown',
'title': 'Kasia music video',
'age_limit': 18,
}
},
{
'url': 'http://www.tube8.com/shemale/teen/blonde-cd-gets-kidnapped-by-two-blacks-and-punished-for-being-a-slutty-girl/19569151/',
'only_matching': True,
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id')
req = compat_urllib_request.Request(url)
req.add_header('Cookie', 'age_verified=1')
webpage = self._download_webpage(req, display_id)
flashvars = json.loads(self._html_search_regex(
r'var flashvars\s*=\s*({.+?})', webpage, 'flashvars'))
video_url = flashvars['video_url']
if flashvars.get('encrypted') is True:
video_url = aes_decrypt_text(video_url, flashvars['video_title'], 32).decode('utf-8')
path = compat_urllib_parse_urlparse(video_url).path
format_id = '-'.join(path.split('/')[4].split('_')[:2])
thumbnail = flashvars.get('image_url')
title = self._html_search_regex(
r'videotitle\s*=\s*"([^"]+)', webpage, 'title')
description = self._html_search_regex(
r'>Description:</strong>(.+?)<', webpage, 'description', fatal=False)
uploader = self._html_search_regex(
r'<strong class="video-username">(?:<a href="[^"]+">)?([^<]+)(?:</a>)?</strong>',
webpage, 'uploader', fatal=False)
like_count = int_or_none(self._html_search_regex(
r"rupVar\s*=\s*'(\d+)'", webpage, 'like count', fatal=False))
dislike_count = int_or_none(self._html_search_regex(
r"rdownVar\s*=\s*'(\d+)'", webpage, 'dislike count', fatal=False))
view_count = self._html_search_regex(
r'<strong>Views: </strong>([\d,\.]+)</li>', webpage, 'view count', fatal=False)
if view_count:
view_count = str_to_int(view_count)
comment_count = self._html_search_regex(
r'<span id="allCommentsCount">(\d+)</span>', webpage, 'comment count', fatal=False)
if comment_count:
comment_count = str_to_int(comment_count)
return {
'id': video_id,
'display_id': display_id,
'url': video_url,
'title': title,
'description': description,
'thumbnail': thumbnail,
'uploader': uploader,
'format_id': format_id,
'view_count': view_count,
'like_count': like_count,
'dislike_count': dislike_count,
'comment_count': comment_count,
'age_limit': 18,
}
| mit | -4,102,104,422,603,856,400 | 36.073684 | 141 | 0.536059 | false |
KohlsTechnology/ansible | lib/ansible/modules/network/avi/avi_vsvip.py | 26 | 4732 | #!/usr/bin/python
#
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
# Avi Version: 17.1.2
#
# Copyright: (c) 2017 Gaurav Rastogi, <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_vsvip
author: Gaurav Rastogi ([email protected])
short_description: Module for setup of VsVip Avi RESTful Object
description:
- This module is used to configure VsVip object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
cloud_ref:
description:
- It is a reference to an object of type cloud.
- Field introduced in 17.1.1.
dns_info:
description:
- Service discovery specific data including fully qualified domain name, type and time-to-live of the dns record.
- Field introduced in 17.1.1.
east_west_placement:
description:
- Force placement on all service engines in the service engine group (container clouds only).
- Field introduced in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
name:
description:
- Name for the vsvip object.
- Field introduced in 17.1.1.
required: true
tenant_ref:
description:
- It is a reference to an object of type tenant.
- Field introduced in 17.1.1.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Uuid of the vsvip object.
- Field introduced in 17.1.1.
vip:
description:
- List of virtual service ips and other shareable entities.
- Field introduced in 17.1.1.
vrf_context_ref:
description:
- Virtual routing context that the virtual service is bound to.
- This is used to provide the isolation of the set of networks the application is attached to.
- It is a reference to an object of type vrfcontext.
- Field introduced in 17.1.1.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create VsVip object
avi_vsvip:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_vsvip
"""
RETURN = '''
obj:
description: VsVip (api/vsvip) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
cloud_ref=dict(type='str',),
dns_info=dict(type='list',),
east_west_placement=dict(type='bool',),
name=dict(type='str', required=True),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
vip=dict(type='list',),
vrf_context_ref=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'vsvip',
set([]))
if __name__ == '__main__':
main()
| gpl-3.0 | 3,743,106,047,798,852,000 | 32.323944 | 125 | 0.603339 | false |
haeusser/tensorflow | tensorflow/contrib/layers/python/layers/regularizers.py | 63 | 6978 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Regularizers for use with layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numbers
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import standard_ops
from tensorflow.python.platform import tf_logging as logging
__all__ = ['l1_regularizer',
'l2_regularizer',
'l1_l2_regularizer',
'sum_regularizer',
'apply_regularization']
def l1_regularizer(scale, scope=None):
"""Returns a function that can be used to apply L1 regularization to weights.
L1 regularization encourages sparsity.
Args:
scale: A scalar multiplier `Tensor`. 0.0 disables the regularizer.
scope: An optional scope name.
Returns:
A function with signature `l1(weights)` that apply L1 regularization.
Raises:
ValueError: If scale is negative or if scale is not a float.
"""
if isinstance(scale, numbers.Integral):
raise ValueError('scale cannot be an integer: %s' % scale)
if isinstance(scale, numbers.Real):
if scale < 0.:
raise ValueError('Setting a scale less than 0 on a regularizer: %g' %
scale)
if scale == 0.:
logging.info('Scale of 0 disables regularizer.')
return lambda _: None
def l1(weights, name=None):
"""Applies L1 regularization to weights."""
with ops.name_scope(scope, 'l1_regularizer', [weights]) as name:
my_scale = ops.convert_to_tensor(scale,
dtype=weights.dtype.base_dtype,
name='scale')
return standard_ops.multiply(
my_scale,
standard_ops.reduce_sum(standard_ops.abs(weights)),
name=name)
return l1
def l2_regularizer(scale, scope=None):
"""Returns a function that can be used to apply L2 regularization to weights.
Small values of L2 can help prevent overfitting the training data.
Args:
scale: A scalar multiplier `Tensor`. 0.0 disables the regularizer.
scope: An optional scope name.
Returns:
A function with signature `l2(weights)` that applies L2 regularization.
Raises:
ValueError: If scale is negative or if scale is not a float.
"""
if isinstance(scale, numbers.Integral):
raise ValueError('scale cannot be an integer: %s' % (scale,))
if isinstance(scale, numbers.Real):
if scale < 0.:
raise ValueError('Setting a scale less than 0 on a regularizer: %g.' %
scale)
if scale == 0.:
logging.info('Scale of 0 disables regularizer.')
return lambda _: None
def l2(weights):
"""Applies l2 regularization to weights."""
with ops.name_scope(scope, 'l2_regularizer', [weights]) as name:
my_scale = ops.convert_to_tensor(scale,
dtype=weights.dtype.base_dtype,
name='scale')
return standard_ops.multiply(my_scale, nn.l2_loss(weights), name=name)
return l2
def l1_l2_regularizer(scale_l1=1.0, scale_l2=1.0, scope=None):
"""Returns a function that can be used to apply L1 L2 regularizations.
Args:
scale_l1: A scalar multiplier `Tensor` for L1 regularization.
scale_l2: A scalar multiplier `Tensor` for L2 regularization.
scope: An optional scope name.
Returns:
A function with signature `l1_l2(weights)` that applies a weighted sum of
L1 L2 regularization.
Raises:
ValueError: If scale is negative or if scale is not a float.
"""
scope = scope or 'l1_l2_regularizer'
return sum_regularizer([l1_regularizer(scale_l1),
l2_regularizer(scale_l2)],
scope=scope)
def sum_regularizer(regularizer_list, scope=None):
"""Returns a function that applies the sum of multiple regularizers.
Args:
regularizer_list: A list of regularizers to apply.
scope: An optional scope name
Returns:
A function with signature `sum_reg(weights)` that applies the
sum of all the input regularizers.
"""
regularizer_list = [reg for reg in regularizer_list if reg is not None]
if not regularizer_list:
return None
def sum_reg(weights):
"""Applies the sum of all the input regularizers."""
with ops.name_scope(scope, 'sum_regularizer', [weights]) as name:
regularizer_tensors = [reg(weights) for reg in regularizer_list]
return math_ops.add_n(regularizer_tensors, name=name)
return sum_reg
def apply_regularization(regularizer, weights_list=None):
"""Returns the summed penalty by applying `regularizer` to the `weights_list`.
Adding a regularization penalty over the layer weights and embedding weights
can help prevent overfitting the training data. Regularization over layer
biases is less common/useful, but assuming proper data preprocessing/mean
subtraction, it usually shouldn't hurt much either.
Args:
regularizer: A function that takes a single `Tensor` argument and returns
a scalar `Tensor` output.
weights_list: List of weights `Tensors` or `Variables` to apply
`regularizer` over. Defaults to the `GraphKeys.WEIGHTS` collection if
`None`.
Returns:
A scalar representing the overall regularization penalty.
Raises:
ValueError: If `regularizer` does not return a scalar output, or if we find
no weights.
"""
if not weights_list:
weights_list = ops.get_collection(ops.GraphKeys.WEIGHTS)
if not weights_list:
raise ValueError('No weights to regularize.')
with ops.name_scope('get_regularization_penalty',
values=weights_list) as scope:
penalties = [regularizer(w) for w in weights_list]
penalties = [
p if p is not None else constant_op.constant(0.0) for p in penalties
]
for p in penalties:
if p.get_shape().ndims != 0:
raise ValueError('regularizer must return a scalar Tensor instead of a '
'Tensor with rank %d.' % p.get_shape().ndims)
summed_penalty = math_ops.add_n(penalties, name=scope)
ops.add_to_collection(ops.GraphKeys.REGULARIZATION_LOSSES, summed_penalty)
return summed_penalty
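# A minimal usage sketch (illustrative only, not part of this module): the
# variable and loss names below are assumptions. It shows how the factory
# functions above compose with apply_regularization().
#
#   weights = tf.get_variable('weights', shape=[10, 10])
#   regularizer = l1_l2_regularizer(scale_l1=0.01, scale_l2=0.001)
#   penalty = apply_regularization(regularizer, weights_list=[weights])
#   total_loss = data_loss + penalty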
| apache-2.0 | -6,584,794,799,620,627,000 | 34.602041 | 80 | 0.672399 | false |
2uller/LotF | App/Lib/sqlite3/test/userfunctions.py | 2 | 15477 | #-*- coding: ISO-8859-1 -*-
# pysqlite2/test/userfunctions.py: tests for user-defined functions and
# aggregates.
#
# Copyright (C) 2005-2007 Gerhard HΓ€ring <[email protected]>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import unittest
import sqlite3 as sqlite
def func_returntext():
return "foo"
def func_returnunicode():
return u"bar"
def func_returnint():
return 42
def func_returnfloat():
return 3.14
def func_returnnull():
return None
def func_returnblob():
return buffer("blob")
def func_returnlonglong():
return 1<<31
def func_raiseexception():
5 // 0
def func_isstring(v):
return type(v) is unicode
def func_isint(v):
return type(v) is int
def func_isfloat(v):
return type(v) is float
def func_isnone(v):
return type(v) is type(None)
def func_isblob(v):
return type(v) is buffer
def func_islonglong(v):
return isinstance(v, (int, long)) and v >= 1<<31
class AggrNoStep:
def __init__(self):
pass
def finalize(self):
return 1
class AggrNoFinalize:
def __init__(self):
pass
def step(self, x):
pass
class AggrExceptionInInit:
def __init__(self):
5 // 0
def step(self, x):
pass
def finalize(self):
pass
class AggrExceptionInStep:
def __init__(self):
pass
def step(self, x):
5 // 0
def finalize(self):
return 42
class AggrExceptionInFinalize:
def __init__(self):
pass
def step(self, x):
pass
def finalize(self):
5 // 0
class AggrCheckType:
def __init__(self):
self.val = None
def step(self, whichType, val):
theType = {"str": unicode, "int": int, "float": float, "None": type(None), "blob": buffer}
self.val = int(theType[whichType] is type(val))
def finalize(self):
return self.val
class AggrSum:
def __init__(self):
self.val = 0.0
def step(self, val):
self.val += val
def finalize(self):
return self.val
class FunctionTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
self.con.create_function("returntext", 0, func_returntext)
self.con.create_function("returnunicode", 0, func_returnunicode)
self.con.create_function("returnint", 0, func_returnint)
self.con.create_function("returnfloat", 0, func_returnfloat)
self.con.create_function("returnnull", 0, func_returnnull)
self.con.create_function("returnblob", 0, func_returnblob)
self.con.create_function("returnlonglong", 0, func_returnlonglong)
self.con.create_function("raiseexception", 0, func_raiseexception)
self.con.create_function("isstring", 1, func_isstring)
self.con.create_function("isint", 1, func_isint)
self.con.create_function("isfloat", 1, func_isfloat)
self.con.create_function("isnone", 1, func_isnone)
self.con.create_function("isblob", 1, func_isblob)
self.con.create_function("islonglong", 1, func_islonglong)
def tearDown(self):
self.con.close()
def CheckFuncErrorOnCreate(self):
try:
self.con.create_function("bla", -100, lambda x: 2*x)
self.fail("should have raised an OperationalError")
except sqlite.OperationalError:
pass
def CheckFuncRefCount(self):
def getfunc():
def f():
return 1
return f
f = getfunc()
globals()["foo"] = f
# self.con.create_function("reftest", 0, getfunc())
self.con.create_function("reftest", 0, f)
cur = self.con.cursor()
cur.execute("select reftest()")
def CheckFuncReturnText(self):
cur = self.con.cursor()
cur.execute("select returntext()")
val = cur.fetchone()[0]
self.assertEqual(type(val), unicode)
self.assertEqual(val, "foo")
def CheckFuncReturnUnicode(self):
cur = self.con.cursor()
cur.execute("select returnunicode()")
val = cur.fetchone()[0]
self.assertEqual(type(val), unicode)
self.assertEqual(val, u"bar")
def CheckFuncReturnInt(self):
cur = self.con.cursor()
cur.execute("select returnint()")
val = cur.fetchone()[0]
self.assertEqual(type(val), int)
self.assertEqual(val, 42)
def CheckFuncReturnFloat(self):
cur = self.con.cursor()
cur.execute("select returnfloat()")
val = cur.fetchone()[0]
self.assertEqual(type(val), float)
if val < 3.139 or val > 3.141:
self.fail("wrong value")
def CheckFuncReturnNull(self):
cur = self.con.cursor()
cur.execute("select returnnull()")
val = cur.fetchone()[0]
self.assertEqual(type(val), type(None))
self.assertEqual(val, None)
def CheckFuncReturnBlob(self):
cur = self.con.cursor()
cur.execute("select returnblob()")
val = cur.fetchone()[0]
self.assertEqual(type(val), buffer)
self.assertEqual(val, buffer("blob"))
def CheckFuncReturnLongLong(self):
cur = self.con.cursor()
cur.execute("select returnlonglong()")
val = cur.fetchone()[0]
self.assertEqual(val, 1<<31)
def CheckFuncException(self):
cur = self.con.cursor()
try:
cur.execute("select raiseexception()")
cur.fetchone()
self.fail("should have raised OperationalError")
except sqlite.OperationalError, e:
self.assertEqual(e.args[0], 'user-defined function raised exception')
def CheckParamString(self):
cur = self.con.cursor()
cur.execute("select isstring(?)", ("foo",))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamInt(self):
cur = self.con.cursor()
cur.execute("select isint(?)", (42,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamFloat(self):
cur = self.con.cursor()
cur.execute("select isfloat(?)", (3.14,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamNone(self):
cur = self.con.cursor()
cur.execute("select isnone(?)", (None,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamBlob(self):
cur = self.con.cursor()
cur.execute("select isblob(?)", (buffer("blob"),))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamLongLong(self):
cur = self.con.cursor()
cur.execute("select islonglong(?)", (1<<42,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
class AggregateTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
cur = self.con.cursor()
cur.execute("""
create table test(
t text,
i integer,
f float,
n,
b blob
)
""")
cur.execute("insert into test(t, i, f, n, b) values (?, ?, ?, ?, ?)",
("foo", 5, 3.14, None, buffer("blob"),))
self.con.create_aggregate("nostep", 1, AggrNoStep)
self.con.create_aggregate("nofinalize", 1, AggrNoFinalize)
self.con.create_aggregate("excInit", 1, AggrExceptionInInit)
self.con.create_aggregate("excStep", 1, AggrExceptionInStep)
self.con.create_aggregate("excFinalize", 1, AggrExceptionInFinalize)
self.con.create_aggregate("checkType", 2, AggrCheckType)
self.con.create_aggregate("mysum", 1, AggrSum)
def tearDown(self):
#self.cur.close()
#self.con.close()
pass
def CheckAggrErrorOnCreate(self):
try:
self.con.create_function("bla", -100, AggrSum)
self.fail("should have raised an OperationalError")
except sqlite.OperationalError:
pass
def CheckAggrNoStep(self):
cur = self.con.cursor()
try:
cur.execute("select nostep(t) from test")
self.fail("should have raised an AttributeError")
except AttributeError, e:
self.assertEqual(e.args[0], "AggrNoStep instance has no attribute 'step'")
def CheckAggrNoFinalize(self):
cur = self.con.cursor()
try:
cur.execute("select nofinalize(t) from test")
val = cur.fetchone()[0]
self.fail("should have raised an OperationalError")
except sqlite.OperationalError, e:
self.assertEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")
def CheckAggrExceptionInInit(self):
cur = self.con.cursor()
try:
cur.execute("select excInit(t) from test")
val = cur.fetchone()[0]
self.fail("should have raised an OperationalError")
except sqlite.OperationalError, e:
self.assertEqual(e.args[0], "user-defined aggregate's '__init__' method raised error")
def CheckAggrExceptionInStep(self):
cur = self.con.cursor()
try:
cur.execute("select excStep(t) from test")
val = cur.fetchone()[0]
self.fail("should have raised an OperationalError")
except sqlite.OperationalError, e:
self.assertEqual(e.args[0], "user-defined aggregate's 'step' method raised error")
def CheckAggrExceptionInFinalize(self):
cur = self.con.cursor()
try:
cur.execute("select excFinalize(t) from test")
val = cur.fetchone()[0]
self.fail("should have raised an OperationalError")
except sqlite.OperationalError, e:
self.assertEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")
def CheckAggrCheckParamStr(self):
cur = self.con.cursor()
cur.execute("select checkType('str', ?)", ("foo",))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamInt(self):
cur = self.con.cursor()
cur.execute("select checkType('int', ?)", (42,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamFloat(self):
cur = self.con.cursor()
cur.execute("select checkType('float', ?)", (3.14,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamNone(self):
cur = self.con.cursor()
cur.execute("select checkType('None', ?)", (None,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamBlob(self):
cur = self.con.cursor()
cur.execute("select checkType('blob', ?)", (buffer("blob"),))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckAggrSum(self):
cur = self.con.cursor()
cur.execute("delete from test")
cur.executemany("insert into test(i) values (?)", [(10,), (20,), (30,)])
cur.execute("select mysum(i) from test")
val = cur.fetchone()[0]
self.assertEqual(val, 60)
class AuthorizerTests(unittest.TestCase):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
return sqlite.SQLITE_DENY
if arg2 == 'c2' or arg1 == 't2':
return sqlite.SQLITE_DENY
return sqlite.SQLITE_OK
def setUp(self):
self.con = sqlite.connect(":memory:")
self.con.executescript("""
create table t1 (c1, c2);
create table t2 (c1, c2);
insert into t1 (c1, c2) values (1, 2);
insert into t2 (c1, c2) values (4, 5);
""")
# For our security test:
self.con.execute("select c2 from t2")
self.con.set_authorizer(self.authorizer_cb)
def tearDown(self):
pass
def test_table_access(self):
try:
self.con.execute("select * from t2")
except sqlite.DatabaseError, e:
if not e.args[0].endswith("prohibited"):
self.fail("wrong exception text: %s" % e.args[0])
return
self.fail("should have raised an exception due to missing privileges")
def test_column_access(self):
try:
self.con.execute("select c2 from t1")
except sqlite.DatabaseError, e:
if not e.args[0].endswith("prohibited"):
self.fail("wrong exception text: %s" % e.args[0])
return
self.fail("should have raised an exception due to missing privileges")
class AuthorizerRaiseExceptionTests(AuthorizerTests):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
raise ValueError
if arg2 == 'c2' or arg1 == 't2':
raise ValueError
return sqlite.SQLITE_OK
class AuthorizerIllegalTypeTests(AuthorizerTests):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
return 0.0
if arg2 == 'c2' or arg1 == 't2':
return 0.0
return sqlite.SQLITE_OK
class AuthorizerLargeIntegerTests(AuthorizerTests):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
return 2**32
if arg2 == 'c2' or arg1 == 't2':
return 2**32
return sqlite.SQLITE_OK
def suite():
function_suite = unittest.makeSuite(FunctionTests, "Check")
aggregate_suite = unittest.makeSuite(AggregateTests, "Check")
authorizer_suite = unittest.makeSuite(AuthorizerTests)
return unittest.TestSuite((
function_suite,
aggregate_suite,
authorizer_suite,
unittest.makeSuite(AuthorizerRaiseExceptionTests),
unittest.makeSuite(AuthorizerIllegalTypeTests),
unittest.makeSuite(AuthorizerLargeIntegerTests),
))
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
| gpl-2.0 | -3,292,085,302,704,013,000 | 31.141328 | 98 | 0.582865 | false |
anjalisood/spark-tk | python/sparktk/dicom/ops/filter_by_keywords.py | 14 | 3974 | # vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def filter_by_keywords(self, keywords_values_dict):
"""
Filter the rows based on dictionary of {"keyword":"value"}(applying 'and' operation on dictionary) from column holding xml string
Ex: keywords_values_dict -> {"SOPInstanceUID":"1.3.6.1.4.1.14519.5.2.1.7308.2101.234736319276602547946349519685", "Manufacturer":"SIEMENS", "StudyDate":"20030315"}
Parameters
----------
:param keywords_values_dict: (dict(str, str)) dictionary of keywords and values from xml string in metadata
Examples
--------
>>> dicom_path = "../datasets/dicom_uncompressed"
>>> dicom = tc.dicom.import_dcm(dicom_path)
>>> dicom.metadata.count()
3
<skip>
>>> dicom.metadata.inspect(truncate=30)
[#] id metadata
=======================================
[0] 0 <?xml version="1.0" encodin...
[1] 1 <?xml version="1.0" encodin...
[2] 2 <?xml version="1.0" encodin...
</skip>
#Part of xml string looks as below
<?xml version="1.0" encoding="UTF-8"?>
<NativeDicomModel xml:space="preserve">
<DicomAttribute keyword="FileMetaInformationVersion" tag="00020001" vr="OB"><InlineBinary>AAE=</InlineBinary></DicomAttribute>
<DicomAttribute keyword="MediaStorageSOPClassUID" tag="00020002" vr="UI"><Value number="1">1.2.840.10008.5.1.4.1.1.4</Value></DicomAttribute>
<DicomAttribute keyword="MediaStorageSOPInstanceUID" tag="00020003" vr="UI"><Value number="1">1.3.6.1.4.1.14519.5.2.1.7308.2101.234736319276602547946349519685</Value></DicomAttribute>
...
>>> keywords_values_dict = {"SOPInstanceUID":"1.3.6.1.4.1.14519.5.2.1.7308.2101.234736319276602547946349519685", "Manufacturer":"SIEMENS", "StudyDate":"20030315"}
>>> dicom.filter_by_keywords(keywords_values_dict)
>>> dicom.metadata.count()
1
<skip>
#After filter
>>> dicom.metadata.inspect(truncate=30)
[#] id metadata
=======================================
[0] 0 <?xml version="1.0" encodin...
>>> dicom.pixeldata.inspect(truncate=30)
[#] id imagematrix
=====================================================
[0] 0 [[ 0. 0. 0. ..., 0. 0. 0.]
[ 0. 125. 103. ..., 120. 213. 319.]
[ 0. 117. 94. ..., 135. 223. 325.]
...,
[ 0. 62. 21. ..., 896. 886. 854.]
[ 0. 63. 23. ..., 941. 872. 897.]
[ 0. 60. 30. ..., 951. 822. 906.]]
</skip>
"""
if not isinstance(keywords_values_dict, dict):
raise TypeError("keywords_values_dict should be a type of dict, but found type as %" % type(keywords_values_dict))
for key, value in keywords_values_dict.iteritems():
if not isinstance(key, basestring) or not isinstance(value, basestring):
raise TypeError("both keyword and value should be of <type 'str'>")
#Always scala dicom is invoked, as python joins are expensive compared to serailizations.
def f(scala_dicom):
scala_dicom.filterByKeywords(self._tc.jutils.convert.to_scala_map(keywords_values_dict))
    self._call_scala(f)
| apache-2.0 | -4,249,756,554,954,055,700 | 40.021053 | 199 | 0.588552 | false |
DDelon/youtube-dl | youtube_dl/extractor/phoenix.py | 108 | 1527 | from __future__ import unicode_literals
from .common import InfoExtractor
from .zdf import extract_from_xml_url
class PhoenixIE(InfoExtractor):
_VALID_URL = r'''(?x)https?://(?:www\.)?phoenix\.de/content/
(?:
phoenix/die_sendungen/(?:[^/]+/)?
)?
(?P<id>[0-9]+)'''
_TESTS = [
{
'url': 'http://www.phoenix.de/content/884301',
'md5': 'ed249f045256150c92e72dbb70eadec6',
'info_dict': {
'id': '884301',
'ext': 'mp4',
'title': 'Michael Krons mit Hans-Werner Sinn',
'description': 'Im Dialog - Sa. 25.10.14, 00.00 - 00.35 Uhr',
'upload_date': '20141025',
'uploader': 'Im Dialog',
}
},
{
'url': 'http://www.phoenix.de/content/phoenix/die_sendungen/869815',
'only_matching': True,
},
{
'url': 'http://www.phoenix.de/content/phoenix/die_sendungen/diskussionen/928234',
'only_matching': True,
},
]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
internal_id = self._search_regex(
r'<div class="phx_vod" id="phx_vod_([0-9]+)"',
webpage, 'internal video ID')
api_url = 'http://www.phoenix.de/php/zdfplayer-v1.3/data/beitragsDetails.php?ak=web&id=%s' % internal_id
return extract_from_xml_url(self, video_id, api_url)
| unlicense | -946,064,579,181,267,100 | 32.933333 | 112 | 0.516045 | false |
metaml/nupic | tests/integration/nupic/algorithms/knn_classifier_test/classifier_test.py | 25 | 10208 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import copy
import logging
import time
import unittest2 as unittest
import cPickle
import numpy
from nupic.regions.PyRegion import RealNumpyDType
from nupic.algorithms.KNNClassifier import KNNClassifier
import pca_knn_data
LOGGER = logging.getLogger(__name__)
class KNNClassifierTest(unittest.TestCase):
"""Tests for k Nearest Neighbor classifier"""
def runTestKNNClassifier(self, short = 0):
""" Test the KNN classifier in this module. short can be:
0 (short), 1 (medium), or 2 (long)
"""
failures = ""
if short != 2:
numpy.random.seed(42)
else:
seed_value = int(time.time())
numpy.random.seed(seed_value)
LOGGER.info('Seed used: %d', seed_value)
f = open('seedval', 'a')
f.write(str(seed_value))
f.write('\n')
f.close()
failures += simulateKMoreThanOne()
LOGGER.info("\nTesting KNN Classifier on dense patterns")
numPatterns, numClasses = getNumTestPatterns(short)
patternSize = 100
patterns = numpy.random.rand(numPatterns, patternSize)
patternDict = dict()
testDict = dict()
# Assume there are no repeated patterns -- if there are, then
# numpy.random would be completely broken.
# Patterns in testDict are identical to those in patternDict but for the
# first 2% of items.
for i in xrange(numPatterns):
patternDict[i] = dict()
patternDict[i]['pattern'] = patterns[i]
patternDict[i]['category'] = numpy.random.randint(0, numClasses-1)
testDict[i] = copy.deepcopy(patternDict[i])
testDict[i]['pattern'][:0.02*patternSize] = numpy.random.rand()
testDict[i]['category'] = None
LOGGER.info("\nTesting KNN Classifier with L2 norm")
knn = KNNClassifier(k=1)
failures += simulateClassifier(knn, patternDict, \
"KNN Classifier with L2 norm test")
LOGGER.info("\nTesting KNN Classifier with L1 norm")
knnL1 = KNNClassifier(k=1, distanceNorm=1.0)
failures += simulateClassifier(knnL1, patternDict, \
"KNN Classifier with L1 norm test")
# Test with exact matching classifications.
LOGGER.info("\nTesting KNN Classifier with exact matching. For testing we "
"slightly alter the training data and expect None to be returned for the "
"classifications.")
knnExact = KNNClassifier(k=1, exact=True)
failures += simulateClassifier(knnExact,
patternDict,
"KNN Classifier with exact matching test",
testDict=testDict)
numPatterns, numClasses = getNumTestPatterns(short)
patterns = (numpy.random.rand(numPatterns, 25) > 0.7).astype(RealNumpyDType)
patternDict = dict()
for i in patterns:
iString = str(i.tolist())
if not patternDict.has_key(iString):
randCategory = numpy.random.randint(0, numClasses-1)
patternDict[iString] = dict()
patternDict[iString]['pattern'] = i
patternDict[iString]['category'] = randCategory
LOGGER.info("\nTesting KNN on sparse patterns")
knnDense = KNNClassifier(k=1)
failures += simulateClassifier(knnDense, patternDict, \
"KNN Classifier on sparse pattern test")
self.assertEqual(len(failures), 0,
"Tests failed: \n" + failures)
if short == 2:
f = open('seedval', 'a')
f.write('Pass\n')
f.close()
def runTestPCAKNN(self, short = 0):
LOGGER.info('\nTesting PCA/k-NN classifier')
LOGGER.info('Mode=%s', short)
numDims = 10
numClasses = 10
k = 10
numPatternsPerClass = 100
numPatterns = int(.9 * numClasses * numPatternsPerClass)
numTests = numClasses * numPatternsPerClass - numPatterns
numSVDSamples = int(.1 * numPatterns)
keep = 1
train_data, train_class, test_data, test_class = \
pca_knn_data.generate(numDims, numClasses, k, numPatternsPerClass,
numPatterns, numTests, numSVDSamples, keep)
pca_knn = KNNClassifier(k=k,numSVDSamples=numSVDSamples,
numSVDDims=keep)
knn = KNNClassifier(k=k)
LOGGER.info('Training PCA k-NN')
for i in range(numPatterns):
knn.learn(train_data[i], train_class[i])
pca_knn.learn(train_data[i], train_class[i])
LOGGER.info('Testing PCA k-NN')
numWinnerFailures = 0
numInferenceFailures = 0
numDistFailures = 0
numAbsErrors = 0
for i in range(numTests):
winner, inference, dist, categoryDist = knn.infer(test_data[i])
pca_winner, pca_inference, pca_dist, pca_categoryDist \
= pca_knn.infer(test_data[i])
if winner != test_class[i]:
numAbsErrors += 1
if pca_winner != winner:
numWinnerFailures += 1
if (numpy.abs(pca_inference - inference) > 1e-4).any():
numInferenceFailures += 1
if (numpy.abs(pca_dist - dist) > 1e-4).any():
numDistFailures += 1
s0 = 100*float(numTests - numAbsErrors) / float(numTests)
s1 = 100*float(numTests - numWinnerFailures) / float(numTests)
s2 = 100*float(numTests - numInferenceFailures) / float(numTests)
s3 = 100*float(numTests - numDistFailures) / float(numTests)
LOGGER.info('PCA/k-NN success rate=%s%s', s0, '%')
LOGGER.info('Winner success=%s%s', s1, '%')
LOGGER.info('Inference success=%s%s', s2, '%')
LOGGER.info('Distance success=%s%s', s3, '%')
self.assertEqual(s1, 100.0,
"PCA/k-NN test failed")
def testKNNClassifierShort(self):
self.runTestKNNClassifier(0)
def testPCAKNNShort(self):
self.runTestPCAKNN(0)
def testKNNClassifierMedium(self):
self.runTestKNNClassifier(1)
def testPCAKNNMedium(self):
self.runTestPCAKNN(1)
def simulateKMoreThanOne():
"""A small test with k=3"""
failures = ""
LOGGER.info("Testing the sparse KNN Classifier with k=3")
knn = KNNClassifier(k=3)
v = numpy.zeros((6, 2))
v[0] = [1.0, 0.0]
v[1] = [1.0, 0.2]
v[2] = [1.0, 0.2]
v[3] = [1.0, 2.0]
v[4] = [1.0, 4.0]
v[5] = [1.0, 4.5]
knn.learn(v[0], 0)
knn.learn(v[1], 0)
knn.learn(v[2], 0)
knn.learn(v[3], 1)
knn.learn(v[4], 1)
knn.learn(v[5], 1)
winner, _inferenceResult, _dist, _categoryDist = knn.infer(v[0])
if winner != 0:
failures += "Inference failed with k=3\n"
winner, _inferenceResult, _dist, _categoryDist = knn.infer(v[2])
if winner != 0:
failures += "Inference failed with k=3\n"
winner, _inferenceResult, _dist, _categoryDist = knn.infer(v[3])
if winner != 0:
failures += "Inference failed with k=3\n"
winner, _inferenceResult, _dist, _categoryDist = knn.infer(v[5])
if winner != 1:
failures += "Inference failed with k=3\n"
if len(failures) == 0:
LOGGER.info("Tests passed.")
return failures
def simulateClassifier(knn, patternDict, testName, testDict=None):
"""Train this classifier instance with the given patterns."""
failures = ""
numPatterns = len(patternDict)
LOGGER.info("Training the classifier")
tick = time.time()
for i in patternDict.keys():
knn.learn(patternDict[i]['pattern'], patternDict[i]['category'])
tock = time.time()
LOGGER.info("Time Elapsed %s", tock-tick)
knnString = cPickle.dumps(knn)
LOGGER.info("Size of the classifier is %s", len(knnString))
# Run the classifier to infer categories on either the training data, or the
# test data (of it's provided).
error_count = 0
tick = time.time()
if testDict:
LOGGER.info("Testing the classifier on the test set")
for i in testDict.keys():
winner, _inferenceResult, _dist, _categoryDist \
= knn.infer(testDict[i]['pattern'])
if winner != testDict[i]['category']:
error_count += 1
else:
LOGGER.info("Testing the classifier on the training set")
LOGGER.info("Number of patterns: %s", len(patternDict))
for i in patternDict.keys():
LOGGER.info("Testing %s - %s %s", i, patternDict[i]['category'], \
len(patternDict[i]['pattern']))
winner, _inferenceResult, _dist, _categoryDist \
= knn.infer(patternDict[i]['pattern'])
if winner != patternDict[i]['category']:
error_count += 1
tock = time.time()
LOGGER.info("Time Elapsed %s", tock-tick)
error_rate = float(error_count) / numPatterns
LOGGER.info("Error rate is %s", error_rate)
if error_rate == 0:
LOGGER.info(testName + " passed")
else:
LOGGER.info(testName + " failed")
failures += testName + " failed\n"
return failures
def getNumTestPatterns(short=0):
"""Return the number of patterns and classes the test should use."""
if short==0:
LOGGER.info("Running short tests")
numPatterns = numpy.random.randint(300, 600)
numClasses = numpy.random.randint(50, 150)
elif short==1:
LOGGER.info("\nRunning medium tests")
numPatterns = numpy.random.randint(500, 1500)
numClasses = numpy.random.randint(50, 150)
else:
LOGGER.info("\nRunning long tests")
numPatterns = numpy.random.randint(500, 3000)
numClasses = numpy.random.randint(30, 1000)
LOGGER.info("number of patterns is %s", numPatterns)
LOGGER.info("number of classes is %s", numClasses)
return numPatterns, numClasses
if __name__ == "__main__":
unittest.main()
| agpl-3.0 | -3,643,670,659,823,021,600 | 29.654655 | 80 | 0.646062 | false |
Aristocles/CouchPotatoServer | libs/pyasn1/debug.py | 185 | 1541 | import sys
from pyasn1.compat.octets import octs2ints
from pyasn1 import error
from pyasn1 import __version__
flagNone = 0x0000
flagEncoder = 0x0001
flagDecoder = 0x0002
flagAll = 0xffff
flagMap = {
'encoder': flagEncoder,
'decoder': flagDecoder,
'all': flagAll
}
class Debug:
defaultPrinter = sys.stderr.write
def __init__(self, *flags):
self._flags = flagNone
self._printer = self.defaultPrinter
self('running pyasn1 version %s' % __version__)
for f in flags:
if f not in flagMap:
raise error.PyAsn1Error('bad debug flag %s' % (f,))
self._flags = self._flags | flagMap[f]
self('debug category \'%s\' enabled' % f)
def __str__(self):
return 'logger %s, flags %x' % (self._printer, self._flags)
def __call__(self, msg):
self._printer('DBG: %s\n' % msg)
def __and__(self, flag):
return self._flags & flag
def __rand__(self, flag):
return flag & self._flags
logger = 0
def setLogger(l):
global logger
logger = l
def hexdump(octets):
return ' '.join(
[ '%s%.2X' % (n%16 == 0 and ('\n%.5d: ' % n) or '', x)
for n,x in zip(range(len(octets)), octs2ints(octets)) ]
)
class Scope:
def __init__(self):
self._list = []
def __str__(self): return '.'.join(self._list)
def push(self, token):
self._list.append(token)
def pop(self):
return self._list.pop()
scope = Scope()
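# Illustrative usage only (not part of pyasn1): callers typically enable
# debugging by installing a Debug instance as the module-level logger, e.g.
#
#   from pyasn1 import debug
#   debug.setLogger(debug.Debug('decoder'))   # log decoder activity
#   debug.setLogger(0)                        # turn logging back off
#
# The flag names accepted by Debug() are the keys of flagMap above.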
| gpl-3.0 | 6,887,053,370,497,646,000 | 22.707692 | 69 | 0.550292 | false |
xNovax/SickRage | lib/html5lib/treewalkers/pulldom.py | 1729 | 2302 | from __future__ import absolute_import, division, unicode_literals
from xml.dom.pulldom import START_ELEMENT, END_ELEMENT, \
COMMENT, IGNORABLE_WHITESPACE, CHARACTERS
from . import _base
from ..constants import voidElements
class TreeWalker(_base.TreeWalker):
def __iter__(self):
ignore_until = None
previous = None
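        # After emitting an EmptyTag for a void element, ignore_until records
        # that node; intervening events are suppressed until its matching
        # END_ELEMENT is seen (a stream that never delivers it is reported as
        # ill-formed below).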
for event in self.tree:
if previous is not None and \
(ignore_until is None or previous[1] is ignore_until):
if previous[1] is ignore_until:
ignore_until = None
for token in self.tokens(previous, event):
yield token
if token["type"] == "EmptyTag":
ignore_until = previous[1]
previous = event
if ignore_until is None or previous[1] is ignore_until:
for token in self.tokens(previous, None):
yield token
elif ignore_until is not None:
raise ValueError("Illformed DOM event stream: void element without END_ELEMENT")
def tokens(self, event, next):
type, node = event
if type == START_ELEMENT:
name = node.nodeName
namespace = node.namespaceURI
attrs = {}
for attr in list(node.attributes.keys()):
attr = node.getAttributeNode(attr)
attrs[(attr.namespaceURI, attr.localName)] = attr.value
if name in voidElements:
for token in self.emptyTag(namespace,
name,
attrs,
not next or next[1] is not node):
yield token
else:
yield self.startTag(namespace, name, attrs)
elif type == END_ELEMENT:
name = node.nodeName
namespace = node.namespaceURI
if name not in voidElements:
yield self.endTag(namespace, name)
elif type == COMMENT:
yield self.comment(node.nodeValue)
elif type in (IGNORABLE_WHITESPACE, CHARACTERS):
for token in self.text(node.nodeValue):
yield token
else:
yield self.unknown(type)
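# Rough usage sketch (assumptions, not part of this module): a pulldom event
# stream is wrapped by this TreeWalker and consumed by a serializer, e.g.
#
#   from xml.dom import pulldom
#   from html5lib.serializer import HTMLSerializer
#   stream = pulldom.parseString('<p>hi</p>')
#   walker = TreeWalker(stream)
#   html = HTMLSerializer().render(walker)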
| gpl-3.0 | 6,885,719,616,701,431,000 | 35.539683 | 92 | 0.533015 | false |
gencer/sentry | src/sentry/south_migrations/0060_fill_filter_key.py | 5 | 28004 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
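        # Backfill FilterKey: for every project, create one row per distinct
        # key found in that project's existing FilterValue rows.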
for project in orm['sentry.Project'].objects.all():
for key in orm['sentry.FilterValue'].objects.filter(project=project).values_list(
'key', flat=True
).distinct():
orm['sentry.FilterKey'].objects.create(project=project, key=key)
def backwards(self, orm):
"Write your backwards methods here."
models = {
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'first_name':
('django.db.models.fields.CharField', [], {
'max_length': '30',
'blank': 'True'
}),
'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_name':
('django.db.models.fields.CharField', [], {
'max_length': '30',
'blank': 'True'
}),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '30'
})
},
'contenttypes.contenttype': {
'Meta': {
'ordering': "('name',)",
'unique_together': "(('app_label', 'model'),)",
'object_name': 'ContentType',
'db_table': "'django_content_type'"
},
'app_label': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'model': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '100'
})
},
'sentry.event': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'"
},
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'event_set'",
'null': 'True',
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'level': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "'root'",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'server_name': (
'django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True',
'db_index': 'True'
}
),
'site': (
'django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True',
'db_index': 'True'
}
),
'time_spent': ('django.db.models.fields.FloatField', [], {
'null': 'True'
})
},
'sentry.filterkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'FilterKey'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
})
},
'sentry.filtervalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'FilterValue'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.group': {
'Meta': {
'unique_together': "(('project', 'logger', 'culprit', 'checksum'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "'root'",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'status': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {
'default': '0'
}),
'times_seen': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
),
'views': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.View']",
'symmetrical': 'False',
'blank': 'True'
}
)
},
'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.messagecountbyminute': {
'Meta': {
'unique_together': "(('project', 'group', 'date'),)",
'object_name': 'MessageCountByMinute'
},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {
'default': '0'
}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
'sentry.messagefiltervalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value', 'group'),)",
'object_name': 'MessageFilterValue'
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.messageindex': {
'Meta': {
'unique_together': "(('column', 'value', 'object_id'),)",
'object_name': 'MessageIndex'
},
'column': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '128'
})
},
'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.pendingteammember': {
'Meta': {
'unique_together': "(('team', 'email'),)",
'object_name': 'PendingTeamMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'team': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'pending_member_set'",
'to': "orm['sentry.Team']"
}
),
'type': ('django.db.models.fields.IntegerField', [], {
'default': '0'
})
},
'sentry.project': {
'Meta': {
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'owner': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_owned_project_set'",
'null': 'True',
'to': "orm['sentry.User']"
}
),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': (
'django.db.models.fields.SlugField', [], {
'max_length': '50',
'unique': 'True',
'null': 'True'
}
),
'status': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']",
'null': 'True'
}
)
},
'sentry.projectcountbyminute': {
'Meta': {
'unique_together': "(('project', 'date'),)",
'object_name': 'ProjectCountByMinute'
},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {
'default': '0'
}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.searchdocument': {
'Meta': {
'unique_together': "(('project', 'group'),)",
'object_name': 'SearchDocument'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_changed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '1'
})
},
'sentry.searchtoken': {
'Meta': {
'unique_together': "(('document', 'field', 'token'),)",
'object_name': 'SearchToken'
},
'document': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'token_set'",
'to': "orm['sentry.SearchDocument']"
}
),
'field':
('django.db.models.fields.CharField', [], {
'default': "'text'",
'max_length': '64'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '1'
}),
'token': ('django.db.models.fields.CharField', [], {
'max_length': '128'
})
},
'sentry.team': {
'Meta': {
'object_name': 'Team'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'owner':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
})
},
'sentry.teammember': {
'Meta': {
'unique_together': "(('team', 'user'),)",
'object_name': 'TeamMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'team': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Team']"
}
),
'type': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_teammember_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'),)",
'object_name': 'UserOption'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'user':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.view': {
'Meta': {
'object_name': 'View'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'path':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '100'
}),
'verbose_name':
('django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True'
}),
'verbose_name_plural':
('django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True'
})
}
}
complete_apps = ['sentry']
symmetrical = True
| bsd-3-clause | 8,142,669,036,421,032,000 | 34.719388 | 93 | 0.381588 | false |
nwjs/chromium.src | third_party/blink/renderer/bindings/scripts/generate_bindings.py | 1 | 2542 | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Runs the bindings code generator for the given tasks.
"""
import optparse
import sys
import bind_gen
import web_idl
def parse_options():
parser = optparse.OptionParser(usage="%prog [options] TASK...")
parser.add_option("--web_idl_database", type="string",
help="filepath of the input database")
parser.add_option("--root_src_dir", type="string",
help='root directory of chromium project, i.e. "//"')
parser.add_option("--root_gen_dir", type="string",
help='root directory of generated code files, i.e. '
'"//out/Default/gen"')
parser.add_option("--output_core_reldir", type="string",
help='output directory for "core" component relative to '
'root_gen_dir')
parser.add_option("--output_modules_reldir", type="string",
help='output directory for "modules" component relative '
'to root_gen_dir')
options, args = parser.parse_args()
required_option_names = (
"web_idl_database", "root_src_dir", "root_gen_dir",
"output_core_reldir", "output_modules_reldir")
for opt_name in required_option_names:
if getattr(options, opt_name) is None:
parser.error("--{} is a required option.".format(opt_name))
if not args:
parser.error("No argument specified.")
return options, args
def main():
options, tasks = parse_options()
dispatch_table = {
'dictionary': bind_gen.generate_dictionaries,
'enumeration': bind_gen.generate_enumerations,
'interface': bind_gen.generate_interfaces,
'union': bind_gen.generate_unions,
}
for task in tasks:
if task not in dispatch_table:
sys.exit("Unknown task: {}".format(task))
web_idl_database = web_idl.Database.read_from_file(options.web_idl_database)
component_reldirs = {
web_idl.Component('core'): options.output_core_reldir,
web_idl.Component('modules'): options.output_modules_reldir,
}
bind_gen.init(root_src_dir=options.root_src_dir,
root_gen_dir=options.root_gen_dir,
component_reldirs=component_reldirs)
for task in tasks:
dispatch_table[task](web_idl_database=web_idl_database)
if __name__ == '__main__':
main()
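# Example invocation (paths are illustrative assumptions, not taken from any
# build file); tasks may be any of: dictionary, enumeration, interface, union.
#
#   python generate_bindings.py \
#       --web_idl_database=gen/web_idl_database.pickle \
#       --root_src_dir=../.. --root_gen_dir=gen \
#       --output_core_reldir=third_party/blink/renderer/bindings/core/v8 \
#       --output_modules_reldir=third_party/blink/renderer/bindings/modules/v8 \
#       dictionary enumeration interface union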
| bsd-3-clause | 4,693,703,473,959,486,000 | 32.893333 | 80 | 0.612116 | false |
piece601/Openkore | src/scons-local-2.0.1/SCons/Warnings.py | 61 | 6806 | #
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""SCons.Warnings
This file implements the warnings framework for SCons.
"""
__revision__ = "src/engine/SCons/Warnings.py 5134 2010/08/16 23:02:40 bdeegan"
import sys
import SCons.Errors
class Warning(SCons.Errors.UserError):
pass
class WarningOnByDefault(Warning):
pass
# NOTE: If you add a new warning class, add it to the man page, too!
class CacheWriteErrorWarning(Warning):
pass
class CorruptSConsignWarning(WarningOnByDefault):
pass
class DependencyWarning(Warning):
pass
class DuplicateEnvironmentWarning(WarningOnByDefault):
pass
class FutureReservedVariableWarning(WarningOnByDefault):
pass
class LinkWarning(WarningOnByDefault):
pass
class MisleadingKeywordsWarning(WarningOnByDefault):
pass
class MissingSConscriptWarning(WarningOnByDefault):
pass
class NoMD5ModuleWarning(WarningOnByDefault):
pass
class NoMetaclassSupportWarning(WarningOnByDefault):
pass
class NoObjectCountWarning(WarningOnByDefault):
pass
class NoParallelSupportWarning(WarningOnByDefault):
pass
class ReservedVariableWarning(WarningOnByDefault):
pass
class StackSizeWarning(WarningOnByDefault):
pass
class VisualCMissingWarning(WarningOnByDefault):
pass
# Used when MSVC_VERSION and MSVS_VERSION do not point to the
# same version (MSVS_VERSION is deprecated)
class VisualVersionMismatch(WarningOnByDefault):
pass
class VisualStudioMissingWarning(Warning):
pass
class FortranCxxMixWarning(LinkWarning):
pass
# Deprecation warnings
class FutureDeprecatedWarning(Warning):
pass
class DeprecatedWarning(Warning):
pass
class MandatoryDeprecatedWarning(DeprecatedWarning):
pass
# Special case; base always stays DeprecatedWarning
class PythonVersionWarning(DeprecatedWarning):
pass
class DeprecatedSourceCodeWarning(FutureDeprecatedWarning):
pass
class DeprecatedBuildDirWarning(DeprecatedWarning):
pass
class TaskmasterNeedsExecuteWarning(DeprecatedWarning):
pass
class DeprecatedCopyWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedOptionsWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedSourceSignaturesWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedTargetSignaturesWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedDebugOptionsWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedSigModuleWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedBuilderKeywordsWarning(MandatoryDeprecatedWarning):
pass
# The below is a list of 2-tuples. The first element is a class object.
# The second element is true if that class is enabled, false if it is disabled.
_enabled = []
# If set, raise the warning as an exception
_warningAsException = 0
# If not None, a function to call with the warning
_warningOut = None
def suppressWarningClass(clazz):
"""Suppresses all warnings that are of type clazz or
derived from clazz."""
_enabled.insert(0, (clazz, 0))
def enableWarningClass(clazz):
"""Enables all warnings that are of type clazz or
derived from clazz."""
_enabled.insert(0, (clazz, 1))
def warningAsException(flag=1):
"""Turn warnings into exceptions. Returns the old value of the flag."""
global _warningAsException
old = _warningAsException
_warningAsException = flag
return old
def warn(clazz, *args):
global _enabled, _warningAsException, _warningOut
warning = clazz(args)
for clazz, flag in _enabled:
if isinstance(warning, clazz):
if flag:
if _warningAsException:
raise warning
if _warningOut:
_warningOut(warning)
break
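# A minimal sketch of how these hooks fit together (the message text is made up):
#   enableWarningClass(DeprecatedWarning)
#   warn(DeprecatedWarning, "the Foo() builder is deprecated")
# warn() only emits anything once a _warningOut callback has been installed,
# or raises the warning directly when warningAsException() has been set.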
def process_warn_strings(arguments):
"""Process string specifications of enabling/disabling warnings,
as passed to the --warn option or the SetOption('warn') function.
An argument to this option should be of the form <warning-class>
or no-<warning-class>. The warning class is munged in order
to get an actual class name from the classes above, which we
need to pass to the {enable,disable}WarningClass() functions.
The supplied <warning-class> is split on hyphens, each element
is capitalized, then smushed back together. Then the string
"Warning" is appended to get the class name.
For example, 'deprecated' will enable the DeprecatedWarning
class. 'no-dependency' will disable the DependencyWarning class.
As a special case, --warn=all and --warn=no-all will enable or
disable (respectively) the base Warning class of all warnings.
"""
def _capitalize(s):
if s[:5] == "scons":
return "SCons" + s[5:]
else:
return s.capitalize()
for arg in arguments:
elems = arg.lower().split('-')
enable = 1
if elems[0] == 'no':
enable = 0
del elems[0]
if len(elems) == 1 and elems[0] == 'all':
class_name = "Warning"
else:
class_name = ''.join(map(_capitalize, elems)) + "Warning"
try:
clazz = globals()[class_name]
except KeyError:
sys.stderr.write("No warning type: '%s'\n" % arg)
else:
if enable:
enableWarningClass(clazz)
elif issubclass(clazz, MandatoryDeprecatedWarning):
fmt = "Can not disable mandataory warning: '%s'\n"
sys.stderr.write(fmt % arg)
else:
suppressWarningClass(clazz)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-2.0 | 6,010,045,399,911,633,000 | 26.666667 | 95 | 0.714664 | false |
TomHeatwole/osf.io | scripts/tests/test_triggered_mails.py | 31 | 2280 | import mock
from datetime import datetime, timedelta
from nose.tools import * # noqa
from tests.base import OsfTestCase
from tests.factories import UserFactory
from scripts.triggered_mails import main, find_inactive_users_with_no_inactivity_email_sent_or_queued
from website import mails
class TestTriggeredMails(OsfTestCase):
def setUp(self):
super(TestTriggeredMails, self).setUp()
self.user = UserFactory()
self.user.date_last_login = datetime.utcnow()
self.user.save()
@mock.patch('website.mails.queue_mail')
def test_dont_trigger_no_login_mail(self, mock_queue):
self.user.date_last_login = datetime.utcnow() - timedelta(seconds=6)
self.user.save()
main(dry_run=False)
assert_false(mock_queue.called)
@mock.patch('website.mails.queue_mail')
def test_trigger_no_login_mail(self, mock_queue):
self.user.date_last_login = datetime.utcnow() - timedelta(weeks=6)
self.user.save()
main(dry_run=False)
mock_queue.assert_called_with(
user=mock.ANY,
fullname=self.user.fullname,
to_addr=self.user.username,
mail={'callback': mock.ANY, 'template': 'no_login', 'subject': mock.ANY},
send_at=mock.ANY,
)
@mock.patch('website.mails.send_mail')
def test_find_inactive_users_with_no_inactivity_email_sent_or_queued(self, mock_mail):
user_active = UserFactory(fullname='Spot')
user_inactive = UserFactory(fullname='Nucha')
user_already_received_mail = UserFactory(fullname='Pep')
user_active.date_last_login = datetime.utcnow() - timedelta(seconds=6)
user_inactive.date_last_login = datetime.utcnow() - timedelta(weeks=6)
user_already_received_mail.date_last_login = datetime.utcnow() - timedelta(weeks=6)
user_active.save()
user_inactive.save()
user_already_received_mail.save()
mails.queue_mail(to_addr=user_already_received_mail.username,
send_at=datetime.utcnow(),
user=user_already_received_mail,
mail=mails.NO_LOGIN)
users = find_inactive_users_with_no_inactivity_email_sent_or_queued()
assert_equal(len(users), 1)
| apache-2.0 | 8,798,485,361,368,447,000 | 40.454545 | 101 | 0.649123 | false |
tuxfux-hlp-notes/python-batches | archieves/batch-61/modules/myenv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py | 1093 | 8936 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
'od.itervalues -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
'od.iteritems -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
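# A short illustration of the ordering guarantee (sketch, not part of the API):
#   d = OrderedDict()
#   d['b'] = 1
#   d['a'] = 2
#   d.keys()     # -> ['b', 'a']   (insertion order is preserved)
#   d.popitem()  # -> ('a', 2)     (LIFO by default)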
| gpl-3.0 | -2,244,551,969,868,659,200 | 33.369231 | 87 | 0.548568 | false |
EvanK/ansible | test/units/modules/network/f5/test_bigip_configsync_action.py | 21 | 3910 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_configsync_action import Parameters
from library.modules.bigip_configsync_action import ModuleManager
from library.modules.bigip_configsync_action import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_configsync_action import Parameters
from ansible.modules.network.f5.bigip_configsync_action import ModuleManager
from ansible.modules.network.f5.bigip_configsync_action import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
sync_device_to_group=True,
sync_group_to_device=True,
overwrite_config=True,
device_group="foo"
)
p = Parameters(params=args)
assert p.sync_device_to_group is True
assert p.sync_group_to_device is True
assert p.overwrite_config is True
assert p.device_group == 'foo'
def test_module_parameters_yes_no(self):
args = dict(
sync_device_to_group='yes',
sync_group_to_device='no',
overwrite_config='yes',
device_group="foo"
)
p = Parameters(params=args)
assert p.sync_device_to_group == 'yes'
assert p.sync_group_to_device == 'no'
assert p.overwrite_config == 'yes'
assert p.device_group == 'foo'
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
self.patcher1 = patch('time.sleep')
self.patcher1.start()
def tearDown(self):
self.patcher1.stop()
def test_update_agent_status_traps(self, *args):
set_module_args(dict(
sync_device_to_group='yes',
device_group="foo",
password='password',
server='localhost',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm._device_group_exists = Mock(return_value=True)
mm._sync_to_group_required = Mock(return_value=False)
mm.execute_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=None)
mm._get_status_from_resource = Mock()
mm._get_status_from_resource.side_effect = [
'Changes Pending', 'Awaiting Initial Sync', 'In Sync'
]
results = mm.exec_module()
assert results['changed'] is True
| gpl-3.0 | -5,540,787,953,279,260,000 | 28.847328 | 91 | 0.644757 | false |
JD-P/HackerNewsToJSON | hn2json.py | 1 | 6275 | #!/usr/bin/env python
"""Python-Pinboard
Python script for downloading your saved stories and saved comments on Hacker News
and converting them to a JSON format for easy use.
Originally written on Pythonista on iPad
"""
__version__ = "1.1"
__license__ = "BSD"
__copyright__ = "Copyright 2013-2014, Luciano Fiandesio"
__author__ = "Luciano Fiandesio <http://fiandes.io/> & John David Pressman <http://jdpressman.com>"
import argparse
import time
import re
import sys
import urllib
import urllib.parse as urlparse
import json
from bs4 import BeautifulSoup
import requests
from types import *
import xml.etree.ElementTree as xml
HACKERNEWS = 'https://news.ycombinator.com'
parser = argparse.ArgumentParser()
parser.add_argument("username", help="The Hacker News username to grab the stories from.")
parser.add_argument("password", help="The password to login with using the username.")
parser.add_argument("-f", "--file", help="Filepath to store the JSON document at.")
parser.add_argument("-n", "--number", default=1, type=int, help="Number of pages to grab, default 1. 0 grabs all pages.")
parser.add_argument("-s", "--stories", action="store_true", help="Grab stories only.")
parser.add_argument("-c", "--comments", action="store_true", help="Grab comments only.")
arguments = parser.parse_args()
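# Typical command line (a sketch; the credentials are placeholders):
#   python hn2json.py myusername mypassword -n 2 -f saved.json
# With neither -s nor -c given, both saved stories and saved comments are
# fetched; -f writes the resulting JSON document to the given file.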
def getSavedStories(session, hnuser, page_range):
"""Return a list of story IDs representing your saved stories.
This function does not return the actual metadata associated, just the IDs.
This list is traversed and each item inside is grabbed using the Hacker News
API by story ID."""
story_ids = []
for page_index in page_range:
saved = session.get(HACKERNEWS + '/upvoted?id=' +
hnuser + "&p=" + str(page_index))
soup = BeautifulSoup(saved.content)
for tag in soup.findAll('td',attrs={'class':'subtext'}):
            if tag.a is not None:
a_tags = tag.find_all('a')
for a_tag in a_tags:
if a_tag['href'][:5] == 'item?':
story_id = a_tag['href'].split('id=')[1]
story_ids.append(story_id)
break
return story_ids
def getSavedComments(session, hnuser, page_range):
"""Return a list of IDs representing your saved comments.
This function does not return the actual metadata associated, just the IDs.
This list is traversed and each item inside is grabbed using the Hacker News
API by ID."""
comment_ids = []
for page_index in page_range:
saved = session.get(HACKERNEWS + '/upvoted?id=' +
hnuser + "&comments=t" + "&p=" + str(page_index))
soup = BeautifulSoup(saved.content)
for tag in soup.findAll('td',attrs={'class':'default'}):
            if tag.a is not None:
a_tags = tag.find_all('a')
for a_tag in a_tags:
if a_tag['href'][:5] == 'item?':
comment_id = a_tag['href'].split('id=')[1]
comment_ids.append(comment_id)
break
return comment_ids
def loginToHackerNews(username, password):
s = requests.Session() # init a session (use cookies across requests)
headers = { # we need to specify an header to get the right cookie
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:25.0) Gecko/20100101 Firefox/25.0',
'Accept' : "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
}
# Build the login POST data and make the login request.
payload = {
'whence': 'news',
'acct': username,
'pw': password
}
auth = s.post(HACKERNEWS+'/login', data=payload, headers=headers )
if 'Bad login' in str(auth.content):
raise Exception("Hacker News authentication failed!")
if not username in str(auth.content):
raise Exception("Hacker News didn't succeed, username not displayed.")
return s # return the http session
def getHackerNewsItem(item_id):
"""Get an 'item' as specified in the HackerNews v0 API."""
time.sleep(0.2)
item_json_link = "https://hacker-news.firebaseio.com/v0/item/" + item_id + ".json"
try:
with urllib.request.urlopen(item_json_link) as item_json:
return json.loads(item_json.read().decode('utf-8'))
except urllib.error.URLError:
return {"title":"Item " + item_id + " could not be retrieved",
"id":item_id}
def item2stderr(item_id, item_count, item_total):
sys.stderr.write("Got item " + item_id + ". ({} of {})\n".format(item_count,
item_total))
def main():
json_items = {"saved_stories":list(), "saved_comments":list()}
if arguments.stories and arguments.comments:
# Assume that if somebody uses both flags they mean to grab both
arguments.stories = False
arguments.comments = False
item_count = 0
session = loginToHackerNews(arguments.username, arguments.password)
page_range = range(1, arguments.number + 1)
if arguments.stories or (not arguments.stories and not arguments.comments):
story_ids = getSavedStories(session,
arguments.username,
page_range)
for story_id in story_ids:
json_items["saved_stories"].append(getHackerNewsItem(story_id))
item_count += 1
item2stderr(story_id, item_count, len(story_ids))
if arguments.comments or (not arguments.stories and not arguments.comments):
item_count = 0
comment_ids = getSavedComments(session,
arguments.username,
page_range)
for comment_id in comment_ids:
json_items["saved_comments"].append(getHackerNewsItem(comment_id))
item_count += 1
item2stderr(comment_id, item_count, len(comment_ids))
if arguments.file:
with open(arguments.file, 'w') as outfile:
json.dump(json_items, outfile)
else:
print(json.dumps(json_items))
if __name__ == "__main__":
main()
| bsd-3-clause | 6,621,482,087,074,103,000 | 40.013072 | 121 | 0.61004 | false |
applidget/zxing-ios | cpp/scons/scons-local-2.0.0.final.0/SCons/Util.py | 34 | 49016 | """SCons.Util
Various utility functions go here.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Util.py 5023 2010/06/14 22:05:46 scons"
import os
import sys
import copy
import re
import types
from collections import UserDict, UserList, UserString
# Don't "from types import ..." these because we need to get at the
# types module later to look for UnicodeType.
InstanceType = types.InstanceType
MethodType = types.MethodType
FunctionType = types.FunctionType
try: unicode
except NameError: UnicodeType = None
else: UnicodeType = unicode
def dictify(keys, values, result={}):
for k, v in zip(keys, values):
result[k] = v
return result
_altsep = os.altsep
if _altsep is None and sys.platform == 'win32':
# My ActivePython 2.0.1 doesn't set os.altsep! What gives?
_altsep = '/'
if _altsep:
def rightmost_separator(path, sep):
return max(path.rfind(sep), path.rfind(_altsep))
else:
def rightmost_separator(path, sep):
return path.rfind(sep)
# First two from the Python Cookbook, just for completeness.
# (Yeah, yeah, YAGNI...)
def containsAny(str, set):
"""Check whether sequence str contains ANY of the items in set."""
for c in set:
if c in str: return 1
return 0
def containsAll(str, set):
"""Check whether sequence str contains ALL of the items in set."""
for c in set:
if c not in str: return 0
return 1
def containsOnly(str, set):
"""Check whether sequence str contains ONLY items in set."""
for c in str:
if c not in set: return 0
return 1
def splitext(path):
"Same as os.path.splitext() but faster."
sep = rightmost_separator(path, os.sep)
dot = path.rfind('.')
# An ext is only real if it has at least one non-digit char
if dot > sep and not containsOnly(path[dot:], "0123456789."):
return path[:dot],path[dot:]
else:
return path,""
def updrive(path):
"""
Make the drive letter (if any) upper case.
    This is useful because Windows is inconsistent on the case
of the drive letter, which can cause inconsistencies when
calculating command signatures.
"""
drive, rest = os.path.splitdrive(path)
if drive:
path = drive.upper() + rest
return path
class NodeList(UserList):
"""This class is almost exactly like a regular list of Nodes
(actually it can hold any object), with one important difference.
If you try to get an attribute from this list, it will return that
attribute from every item in the list. For example:
>>> someList = NodeList([ ' foo ', ' bar ' ])
>>> someList.strip()
[ 'foo', 'bar' ]
"""
def __nonzero__(self):
return len(self.data) != 0
def __str__(self):
return ' '.join(map(str, self.data))
def __iter__(self):
return iter(self.data)
def __call__(self, *args, **kwargs):
result = [x(*args, **kwargs) for x in self.data]
return self.__class__(result)
def __getattr__(self, name):
result = [getattr(x, name) for x in self.data]
return self.__class__(result)
_get_env_var = re.compile(r'^\$([_a-zA-Z]\w*|{[_a-zA-Z]\w*})$')
def get_environment_var(varstr):
"""Given a string, first determine if it looks like a reference
to a single environment variable, like "$FOO" or "${FOO}".
If so, return that variable with no decorations ("FOO").
If not, return None."""
mo=_get_env_var.match(to_String(varstr))
if mo:
var = mo.group(1)
if var[0] == '{':
return var[1:-1]
else:
return var
else:
return None
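# For instance (a quick sketch):
#   get_environment_var("$FOO")      -> "FOO"
#   get_environment_var("${BAR}")    -> "BAR"
#   get_environment_var("$FOO/bar")  -> None   (not a lone variable reference)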
class DisplayEngine(object):
print_it = True
def __call__(self, text, append_newline=1):
if not self.print_it:
return
if append_newline: text = text + '\n'
try:
sys.stdout.write(unicode(text))
except IOError:
# Stdout might be connected to a pipe that has been closed
# by now. The most likely reason for the pipe being closed
            # is that the user has pressed ctrl-c. If this is the case,
            # then SCons is currently shutting down. We therefore ignore
            # IOError's here so that SCons can continue and shut down
# properly so that the .sconsign is correctly written
# before SCons exits.
pass
def set_mode(self, mode):
self.print_it = mode
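# The module-level instance created further below is typically used like this
# (sketch; the message text is made up):
#   display("scons: Building targets ...")
#   display.set_mode(0)   # silence subsequent display() calls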
def render_tree(root, child_func, prune=0, margin=[0], visited={}):
"""
Render a tree of nodes into an ASCII tree view.
root - the root node of the tree
child_func - the function called to get the children of a node
prune - don't visit the same node twice
margin - the format of the left margin to use for children of root.
1 results in a pipe, and 0 results in no pipe.
visited - a dictionary of visited nodes in the current branch if not prune,
or in the whole tree if prune.
"""
rname = str(root)
children = child_func(root)
retval = ""
for pipe in margin[:-1]:
if pipe:
retval = retval + "| "
else:
retval = retval + " "
if rname in visited:
return retval + "+-[" + rname + "]\n"
retval = retval + "+-" + rname + "\n"
if not prune:
visited = copy.copy(visited)
visited[rname] = 1
for i in range(len(children)):
margin.append(i<len(children)-1)
retval = retval + render_tree(children[i], child_func, prune, margin, visited
)
margin.pop()
return retval
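# For a root node "prog" with children "foo.o" and "bar.o" the result looks
# roughly like this (sketch):
#   +-prog
#     +-foo.o
#     +-bar.o
# deeper levels draw a "| " margin while siblings are still pending.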
IDX = lambda N: N and 1 or 0
def print_tree(root, child_func, prune=0, showtags=0, margin=[0], visited={}):
"""
Print a tree of nodes. This is like render_tree, except it prints
lines directly instead of creating a string representation in memory,
so that huge trees can be printed.
root - the root node of the tree
child_func - the function called to get the children of a node
prune - don't visit the same node twice
showtags - print status information to the left of each node line
margin - the format of the left margin to use for children of root.
1 results in a pipe, and 0 results in no pipe.
visited - a dictionary of visited nodes in the current branch if not prune,
or in the whole tree if prune.
"""
rname = str(root)
if showtags:
if showtags == 2:
legend = (' E = exists\n' +
' R = exists in repository only\n' +
' b = implicit builder\n' +
' B = explicit builder\n' +
' S = side effect\n' +
' P = precious\n' +
' A = always build\n' +
' C = current\n' +
' N = no clean\n' +
' H = no cache\n' +
'\n')
sys.stdout.write(unicode(legend))
tags = ['[']
tags.append(' E'[IDX(root.exists())])
tags.append(' R'[IDX(root.rexists() and not root.exists())])
tags.append(' BbB'[[0,1][IDX(root.has_explicit_builder())] +
[0,2][IDX(root.has_builder())]])
tags.append(' S'[IDX(root.side_effect)])
tags.append(' P'[IDX(root.precious)])
tags.append(' A'[IDX(root.always_build)])
tags.append(' C'[IDX(root.is_up_to_date())])
tags.append(' N'[IDX(root.noclean)])
tags.append(' H'[IDX(root.nocache)])
tags.append(']')
else:
tags = []
def MMM(m):
return [" ","| "][m]
margins = list(map(MMM, margin[:-1]))
children = child_func(root)
if prune and rname in visited and children:
sys.stdout.write(''.join(tags + margins + ['+-[', rname, ']']) + u'\n')
return
sys.stdout.write(''.join(tags + margins + ['+-', rname]) + u'\n')
visited[rname] = 1
if children:
margin.append(1)
idx = IDX(showtags)
for C in children[:-1]:
print_tree(C, child_func, prune, idx, margin, visited)
margin[-1] = 0
print_tree(children[-1], child_func, prune, idx, margin, visited)
margin.pop()
# Functions for deciding if things are like various types, mainly to
# handle UserDict, UserList and UserString like their underlying types.
#
# Yes, all of this manual testing breaks polymorphism, and the real
# Pythonic way to do all of this would be to just try it and handle the
# exception, but handling the exception when it's not the right type is
# often too slow.
# We are using the following trick to speed up these
# functions. Default arguments are used to take a snapshot of the
# the global functions and constants used by these functions. This
# transforms accesses to global variable into local variables
# accesses (i.e. LOAD_FAST instead of LOAD_GLOBAL).
DictTypes = (dict, UserDict)
ListTypes = (list, UserList)
SequenceTypes = (list, tuple, UserList)
# Note that profiling data shows a speed-up when comparing
# explicitly with str and unicode instead of simply comparing
# with basestring. (at least on Python 2.5.1)
StringTypes = (str, unicode, UserString)
# Empirically, it is faster to check explicitly for str and
# unicode than for basestring.
BaseStringTypes = (str, unicode)
def is_Dict(obj, isinstance=isinstance, DictTypes=DictTypes):
return isinstance(obj, DictTypes)
def is_List(obj, isinstance=isinstance, ListTypes=ListTypes):
return isinstance(obj, ListTypes)
def is_Sequence(obj, isinstance=isinstance, SequenceTypes=SequenceTypes):
return isinstance(obj, SequenceTypes)
def is_Tuple(obj, isinstance=isinstance, tuple=tuple):
return isinstance(obj, tuple)
def is_String(obj, isinstance=isinstance, StringTypes=StringTypes):
return isinstance(obj, StringTypes)
def is_Scalar(obj, isinstance=isinstance, StringTypes=StringTypes, SequenceTypes=SequenceTypes):
# Profiling shows that there is an impressive speed-up of 2x
    # when explicitly checking for strings instead of just not
    # sequence when the argument (i.e. obj) is already a string.
    # But, if obj is not a string then it is twice as fast to
    # check only for 'not sequence'. The following code therefore
    # assumes that the obj argument is a string most of the time.
return isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes)
def do_flatten(sequence, result, isinstance=isinstance,
StringTypes=StringTypes, SequenceTypes=SequenceTypes):
for item in sequence:
if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
result.append(item)
else:
do_flatten(item, result)
def flatten(obj, isinstance=isinstance, StringTypes=StringTypes,
SequenceTypes=SequenceTypes, do_flatten=do_flatten):
"""Flatten a sequence to a non-nested list.
Flatten() converts either a single scalar or a nested sequence
to a non-nested list. Note that flatten() considers strings
to be scalars instead of sequences like Python would.
"""
if isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes):
return [obj]
result = []
for item in obj:
if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
result.append(item)
else:
do_flatten(item, result)
return result
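# Two quick examples (sketch):
#   flatten('x')                        -> ['x']
#   flatten(['a', ['b', ('c', 'd')]])   -> ['a', 'b', 'c', 'd']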
def flatten_sequence(sequence, isinstance=isinstance, StringTypes=StringTypes,
SequenceTypes=SequenceTypes, do_flatten=do_flatten):
"""Flatten a sequence to a non-nested list.
Same as flatten(), but it does not handle the single scalar
case. This is slightly more efficient when one knows that
the sequence to flatten can not be a scalar.
"""
result = []
for item in sequence:
if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
result.append(item)
else:
do_flatten(item, result)
return result
# Generic convert-to-string functions that abstract away whether or
# not the Python we're executing has Unicode support. The wrapper
# to_String_for_signature() will use a for_signature() method if the
# specified object has one.
#
def to_String(s,
isinstance=isinstance, str=str,
UserString=UserString, BaseStringTypes=BaseStringTypes):
if isinstance(s,BaseStringTypes):
# Early out when already a string!
return s
elif isinstance(s, UserString):
# s.data can only be either a unicode or a regular
# string. Please see the UserString initializer.
return s.data
else:
return str(s)
def to_String_for_subst(s,
isinstance=isinstance, str=str, to_String=to_String,
BaseStringTypes=BaseStringTypes, SequenceTypes=SequenceTypes,
UserString=UserString):
# Note that the test cases are sorted by order of probability.
if isinstance(s, BaseStringTypes):
return s
elif isinstance(s, SequenceTypes):
l = []
for e in s:
l.append(to_String_for_subst(e))
        return ' '.join(l)
elif isinstance(s, UserString):
# s.data can only be either a unicode or a regular
# string. Please see the UserString initializer.
return s.data
else:
return str(s)
def to_String_for_signature(obj, to_String_for_subst=to_String_for_subst,
AttributeError=AttributeError):
try:
f = obj.for_signature
except AttributeError:
return to_String_for_subst(obj)
else:
return f()
# The SCons "semi-deep" copy.
#
# This makes separate copies of lists (including UserList objects)
# dictionaries (including UserDict objects) and tuples, but just copies
# references to anything else it finds.
#
# A special case is any object that has a __semi_deepcopy__() method,
# which we invoke to create the copy, which is used by the BuilderDict
# class because of its extra initialization argument.
#
# The dispatch table approach used here is a direct rip-off from the
# normal Python copy module.
_semi_deepcopy_dispatch = d = {}
def _semi_deepcopy_dict(x):
copy = {}
for key, val in x.items():
# The regular Python copy.deepcopy() also deepcopies the key,
# as follows:
#
# copy[semi_deepcopy(key)] = semi_deepcopy(val)
#
# Doesn't seem like we need to, but we'll comment it just in case.
copy[key] = semi_deepcopy(val)
return copy
d[dict] = _semi_deepcopy_dict
def _semi_deepcopy_list(x):
return list(map(semi_deepcopy, x))
d[list] = _semi_deepcopy_list
def _semi_deepcopy_tuple(x):
return tuple(map(semi_deepcopy, x))
d[tuple] = _semi_deepcopy_tuple
def _semi_deepcopy_inst(x):
if hasattr(x, '__semi_deepcopy__'):
return x.__semi_deepcopy__()
elif isinstance(x, UserDict):
return x.__class__(_semi_deepcopy_dict(x))
elif isinstance(x, UserList):
return x.__class__(_semi_deepcopy_list(x))
else:
return x
d[InstanceType] = _semi_deepcopy_inst
def semi_deepcopy(x):
copier = _semi_deepcopy_dispatch.get(type(x))
if copier:
return copier(x)
else:
return x
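# In short (sketch): containers (dicts, lists, tuples and their User* versions)
# are copied recursively, while any other object is shared by reference:
#   copy = semi_deepcopy({'CPPPATH': ['include'], 'BUILDER': some_builder})
# duplicates the dict and the list, but not some_builder itself (a made-up name).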
class Proxy(object):
"""A simple generic Proxy class, forwarding all calls to
subject. So, for the benefit of the python newbie, what does
this really mean? Well, it means that you can take an object, let's
call it 'objA', and wrap it in this Proxy class, with a statement
like this
proxyObj = Proxy(objA),
Then, if in the future, you do something like this
x = proxyObj.var1,
since Proxy does not have a 'var1' attribute (but presumably objA does),
the request actually is equivalent to saying
x = objA.var1
Inherit from this class to create a Proxy.
Note that, with new-style classes, this does *not* work transparently
for Proxy subclasses that use special .__*__() method names, because
those names are now bound to the class, not the individual instances.
You now need to know in advance which .__*__() method names you want
to pass on to the underlying Proxy object, and specifically delegate
their calls like this:
class Foo(Proxy):
__str__ = Delegate('__str__')
"""
def __init__(self, subject):
"""Wrap an object as a Proxy object"""
self._subject = subject
def __getattr__(self, name):
"""Retrieve an attribute from the wrapped object. If the named
attribute doesn't exist, AttributeError is raised"""
return getattr(self._subject, name)
def get(self):
"""Retrieve the entire wrapped object"""
return self._subject
def __cmp__(self, other):
if issubclass(other.__class__, self._subject.__class__):
return cmp(self._subject, other)
return cmp(self.__dict__, other.__dict__)
class Delegate(object):
"""A Python Descriptor class that delegates attribute fetches
to an underlying wrapped subject of a Proxy. Typical use:
class Foo(Proxy):
__str__ = Delegate('__str__')
"""
def __init__(self, attribute):
self.attribute = attribute
def __get__(self, obj, cls):
if isinstance(obj, cls):
return getattr(obj._subject, self.attribute)
else:
return self
# attempt to load the windows registry module:
can_read_reg = 0
try:
import winreg
can_read_reg = 1
hkey_mod = winreg
RegOpenKeyEx = winreg.OpenKeyEx
RegEnumKey = winreg.EnumKey
RegEnumValue = winreg.EnumValue
RegQueryValueEx = winreg.QueryValueEx
RegError = winreg.error
except ImportError:
try:
import win32api
import win32con
can_read_reg = 1
hkey_mod = win32con
RegOpenKeyEx = win32api.RegOpenKeyEx
RegEnumKey = win32api.RegEnumKey
RegEnumValue = win32api.RegEnumValue
RegQueryValueEx = win32api.RegQueryValueEx
RegError = win32api.error
except ImportError:
class _NoError(Exception):
pass
RegError = _NoError
if can_read_reg:
HKEY_CLASSES_ROOT = hkey_mod.HKEY_CLASSES_ROOT
HKEY_LOCAL_MACHINE = hkey_mod.HKEY_LOCAL_MACHINE
HKEY_CURRENT_USER = hkey_mod.HKEY_CURRENT_USER
HKEY_USERS = hkey_mod.HKEY_USERS
def RegGetValue(root, key):
"""This utility function returns a value in the registry
without having to open the key first. Only available on
Windows platforms with a version of Python that can read the
registry. Returns the same thing as
SCons.Util.RegQueryValueEx, except you just specify the entire
path to the value, and don't have to bother opening the key
first. So:
Instead of:
k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
r'SOFTWARE\Microsoft\Windows\CurrentVersion')
out = SCons.Util.RegQueryValueEx(k,
'ProgramFilesDir')
You can write:
out = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE,
r'SOFTWARE\Microsoft\Windows\CurrentVersion\ProgramFilesDir')
"""
# I would use os.path.split here, but it's not a filesystem
# path...
p = key.rfind('\\') + 1
keyp = key[:p-1] # -1 to omit trailing slash
val = key[p:]
k = RegOpenKeyEx(root, keyp)
return RegQueryValueEx(k,val)
else:
try:
e = WindowsError
except NameError:
# Make sure we have a definition of WindowsError so we can
# run platform-independent tests of Windows functionality on
# platforms other than Windows. (WindowsError is, in fact, an
# OSError subclass on Windows.)
class WindowsError(OSError):
pass
import builtins
builtins.WindowsError = WindowsError
else:
del e
HKEY_CLASSES_ROOT = None
HKEY_LOCAL_MACHINE = None
HKEY_CURRENT_USER = None
HKEY_USERS = None
def RegGetValue(root, key):
raise WindowsError
def RegOpenKeyEx(root, key):
raise WindowsError
if sys.platform == 'win32':
def WhereIs(file, path=None, pathext=None, reject=[]):
if path is None:
try:
path = os.environ['PATH']
except KeyError:
return None
if is_String(path):
path = path.split(os.pathsep)
if pathext is None:
try:
pathext = os.environ['PATHEXT']
except KeyError:
pathext = '.COM;.EXE;.BAT;.CMD'
if is_String(pathext):
pathext = pathext.split(os.pathsep)
for ext in pathext:
if ext.lower() == file[-len(ext):].lower():
pathext = ['']
break
if not is_List(reject) and not is_Tuple(reject):
reject = [reject]
for dir in path:
f = os.path.join(dir, file)
for ext in pathext:
fext = f + ext
if os.path.isfile(fext):
try:
reject.index(fext)
except ValueError:
return os.path.normpath(fext)
continue
return None
elif os.name == 'os2':
def WhereIs(file, path=None, pathext=None, reject=[]):
if path is None:
try:
path = os.environ['PATH']
except KeyError:
return None
if is_String(path):
path = path.split(os.pathsep)
if pathext is None:
pathext = ['.exe', '.cmd']
for ext in pathext:
if ext.lower() == file[-len(ext):].lower():
pathext = ['']
break
if not is_List(reject) and not is_Tuple(reject):
reject = [reject]
for dir in path:
f = os.path.join(dir, file)
for ext in pathext:
fext = f + ext
if os.path.isfile(fext):
try:
reject.index(fext)
except ValueError:
return os.path.normpath(fext)
continue
return None
else:
def WhereIs(file, path=None, pathext=None, reject=[]):
import stat
if path is None:
try:
path = os.environ['PATH']
except KeyError:
return None
if is_String(path):
path = path.split(os.pathsep)
if not is_List(reject) and not is_Tuple(reject):
reject = [reject]
for d in path:
f = os.path.join(d, file)
if os.path.isfile(f):
try:
st = os.stat(f)
except OSError:
# os.stat() raises OSError, not IOError if the file
# doesn't exist, so in this case we let IOError get
# raised so as to not mask possibly serious disk or
# network issues.
continue
if stat.S_IMODE(st[stat.ST_MODE]) & 0111:
try:
reject.index(f)
except ValueError:
return os.path.normpath(f)
continue
return None
def PrependPath(oldpath, newpath, sep = os.pathsep,
delete_existing=1, canonicalize=None):
"""This prepends newpath elements to the given oldpath. Will only
add any particular path once (leaving the first one it encounters
and ignoring the rest, to preserve path order), and will
os.path.normpath and os.path.normcase all paths to help assure
this. This can also handle the case where the given old path
variable is a list instead of a string, in which case a list will
be returned instead of a string.
Example:
Old Path: "/foo/bar:/foo"
New Path: "/biz/boom:/foo"
Result: "/biz/boom:/foo:/foo/bar"
If delete_existing is 0, then adding a path that exists will
not move it to the beginning; it will stay where it is in the
list.
If canonicalize is not None, it is applied to each element of
newpath before use.
"""
orig = oldpath
is_list = 1
paths = orig
if not is_List(orig) and not is_Tuple(orig):
paths = paths.split(sep)
is_list = 0
if is_String(newpath):
newpaths = newpath.split(sep)
elif not is_List(newpath) and not is_Tuple(newpath):
newpaths = [ newpath ] # might be a Dir
else:
newpaths = newpath
if canonicalize:
newpaths=list(map(canonicalize, newpaths))
if not delete_existing:
# First uniquify the old paths, making sure to
# preserve the first instance (in Unix/Linux,
# the first one wins), and remembering them in normpaths.
# Then insert the new paths at the head of the list
# if they're not already in the normpaths list.
result = []
normpaths = []
for path in paths:
if not path:
continue
normpath = os.path.normpath(os.path.normcase(path))
if normpath not in normpaths:
result.append(path)
normpaths.append(normpath)
newpaths.reverse() # since we're inserting at the head
for path in newpaths:
if not path:
continue
normpath = os.path.normpath(os.path.normcase(path))
if normpath not in normpaths:
result.insert(0, path)
normpaths.append(normpath)
paths = result
else:
newpaths = newpaths + paths # prepend new paths
normpaths = []
paths = []
# now we add them only if they are unique
for path in newpaths:
normpath = os.path.normpath(os.path.normcase(path))
if path and not normpath in normpaths:
paths.append(path)
normpaths.append(normpath)
if is_list:
return paths
else:
return sep.join(paths)
def AppendPath(oldpath, newpath, sep = os.pathsep,
delete_existing=1, canonicalize=None):
"""This appends new path elements to the given old path. Will
only add any particular path once (leaving the last one it
encounters and ignoring the rest, to preserve path order), and
will os.path.normpath and os.path.normcase all paths to help
assure this. This can also handle the case where the given old
path variable is a list instead of a string, in which case a list
will be returned instead of a string.
Example:
Old Path: "/foo/bar:/foo"
New Path: "/biz/boom:/foo"
Result: "/foo/bar:/biz/boom:/foo"
If delete_existing is 0, then adding a path that exists
will not move it to the end; it will stay where it is in the list.
If canonicalize is not None, it is applied to each element of
newpath before use.
"""
orig = oldpath
is_list = 1
paths = orig
if not is_List(orig) and not is_Tuple(orig):
paths = paths.split(sep)
is_list = 0
if is_String(newpath):
newpaths = newpath.split(sep)
elif not is_List(newpath) and not is_Tuple(newpath):
newpaths = [ newpath ] # might be a Dir
else:
newpaths = newpath
if canonicalize:
newpaths=list(map(canonicalize, newpaths))
if not delete_existing:
# add old paths to result, then
# add new paths if not already present
# (I thought about using a dict for normpaths for speed,
# but it's not clear hashing the strings would be faster
# than linear searching these typically short lists.)
result = []
normpaths = []
for path in paths:
if not path:
continue
result.append(path)
normpaths.append(os.path.normpath(os.path.normcase(path)))
for path in newpaths:
if not path:
continue
normpath = os.path.normpath(os.path.normcase(path))
if normpath not in normpaths:
result.append(path)
normpaths.append(normpath)
paths = result
else:
# start w/ new paths, add old ones if not present,
# then reverse.
newpaths = paths + newpaths # append new paths
newpaths.reverse()
normpaths = []
paths = []
# now we add them only if they are unique
for path in newpaths:
normpath = os.path.normpath(os.path.normcase(path))
if path and not normpath in normpaths:
paths.append(path)
normpaths.append(normpath)
paths.reverse()
if is_list:
return paths
else:
return sep.join(paths)
if sys.platform == 'cygwin':
def get_native_path(path):
"""Transforms an absolute path into a native path for the system. In
Cygwin, this converts from a Cygwin path to a Windows one."""
return os.popen('cygpath -w ' + path).read().replace('\n', '')
else:
def get_native_path(path):
"""Transforms an absolute path into a native path for the system.
Non-Cygwin version, just leave the path alone."""
return path
display = DisplayEngine()
def Split(arg):
if is_List(arg) or is_Tuple(arg):
return arg
elif is_String(arg):
return arg.split()
else:
return [arg]
class CLVar(UserList):
"""A class for command-line construction variables.
This is a list that uses Split() to split an initial string along
white-space arguments, and similarly to split any strings that get
added. This allows us to Do the Right Thing with Append() and
Prepend() (as well as straight Python foo = env['VAR'] + 'arg1
arg2') regardless of whether a user adds a list or a string to a
command-line construction variable.
"""
def __init__(self, seq = []):
UserList.__init__(self, Split(seq))
def __add__(self, other):
return UserList.__add__(self, CLVar(other))
def __radd__(self, other):
return UserList.__radd__(self, CLVar(other))
def __coerce__(self, other):
return (self, CLVar(other))
def __str__(self):
return ' '.join(self.data)
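# Behaviour sketch: strings are split on white space both at construction time
# and when added, so each of these ends up holding ['a', 'b', 'c', 'd']:
#   CLVar('a b c d')
#   CLVar('a b') + 'c d'
#   CLVar('a b') + ['c', 'd']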
# A dictionary that preserves the order in which items are added.
# Submitted by David Benjamin to ActiveState's Python Cookbook web site:
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/107747
# Including fixes/enhancements from the follow-on discussions.
class OrderedDict(UserDict):
def __init__(self, dict = None):
self._keys = []
UserDict.__init__(self, dict)
def __delitem__(self, key):
UserDict.__delitem__(self, key)
self._keys.remove(key)
def __setitem__(self, key, item):
UserDict.__setitem__(self, key, item)
if key not in self._keys: self._keys.append(key)
def clear(self):
UserDict.clear(self)
self._keys = []
def copy(self):
dict = OrderedDict()
dict.update(self)
return dict
def items(self):
return list(zip(self._keys, list(self.values())))
def keys(self):
return self._keys[:]
def popitem(self):
try:
key = self._keys[-1]
except IndexError:
raise KeyError('dictionary is empty')
val = self[key]
del self[key]
return (key, val)
def setdefault(self, key, failobj = None):
UserDict.setdefault(self, key, failobj)
if key not in self._keys: self._keys.append(key)
def update(self, dict):
for (key, val) in dict.items():
self.__setitem__(key, val)
def values(self):
return list(map(self.get, self._keys))
class Selector(OrderedDict):
"""A callable ordered dictionary that maps file suffixes to
dictionary values. We preserve the order in which items are added
so that get_suffix() calls always return the first suffix added."""
def __call__(self, env, source, ext=None):
if ext is None:
try:
ext = source[0].suffix
except IndexError:
ext = ""
try:
return self[ext]
except KeyError:
# Try to perform Environment substitution on the keys of
# the dictionary before giving up.
s_dict = {}
for (k,v) in self.items():
if k is not None:
s_k = env.subst(k)
if s_k in s_dict:
# We only raise an error when variables point
# to the same suffix. If one suffix is literal
# and a variable suffix contains this literal,
# the literal wins and we don't raise an error.
raise KeyError(s_dict[s_k][0], k, s_k)
s_dict[s_k] = (k,v)
try:
return s_dict[ext][1]
except KeyError:
try:
return self[None]
except KeyError:
return None
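# Rough usage sketch (the suffixes and values here are made up):
#   sel = Selector({'.c': 'treat as C', '.cpp': 'treat as C++', None: 'fallback'})
#   sel(env, [node])   # looks up node.suffix, falling back to the None entry
# Keys may also be construction variables such as '$CSUFFIX'; they are expanded
# with env.subst() before the lookup finally gives up.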
if sys.platform == 'cygwin':
# On Cygwin, os.path.normcase() lies, so just report back the
# fact that the underlying Windows OS is case-insensitive.
def case_sensitive_suffixes(s1, s2):
return 0
else:
def case_sensitive_suffixes(s1, s2):
return (os.path.normcase(s1) != os.path.normcase(s2))
def adjustixes(fname, pre, suf, ensure_suffix=False):
if pre:
path, fn = os.path.split(os.path.normpath(fname))
if fn[:len(pre)] != pre:
fname = os.path.join(path, pre + fn)
# Only append a suffix if the suffix we're going to add isn't already
# there, and if either we've been asked to ensure the specific suffix
# is present or there's no suffix on it at all.
if suf and fname[-len(suf):] != suf and \
(ensure_suffix or not splitext(fname)[1]):
fname = fname + suf
return fname
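# For example (sketch):
#   adjustixes('foo', 'lib', '.a')                        -> 'libfoo.a'
#   adjustixes('foo.o', 'lib', '.a')                      -> 'libfoo.o'
#   adjustixes('foo.o', 'lib', '.a', ensure_suffix=True)  -> 'libfoo.o.a'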
# From Tim Peters,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# (Also in the printed Python Cookbook.)
def unique(s):
"""Return a list of the elements in s, but without duplicates.
For example, unique([1,2,3,1,2,3]) is some permutation of [1,2,3],
unique("abcabc") some permutation of ["a", "b", "c"], and
unique(([1, 2], [2, 3], [1, 2])) some permutation of
[[2, 3], [1, 2]].
For best speed, all sequence elements should be hashable. Then
unique() will usually work in linear time.
If not possible, the sequence elements should enjoy a total
ordering, and if list(s).sort() doesn't raise TypeError it's
assumed that they do enjoy a total ordering. Then unique() will
usually work in O(N*log2(N)) time.
If that's not possible either, the sequence elements must support
equality-testing. Then unique() will usually work in quadratic
time.
"""
n = len(s)
if n == 0:
return []
# Try using a dict first, as that's the fastest and will usually
# work. If it doesn't work, it will usually fail quickly, so it
# usually doesn't cost much to *try* it. It requires that all the
# sequence elements be hashable, and support equality comparison.
u = {}
try:
for x in s:
u[x] = 1
except TypeError:
pass # move on to the next method
else:
return list(u.keys())
del u
# We can't hash all the elements. Second fastest is to sort,
# which brings the equal elements together; then duplicates are
# easy to weed out in a single pass.
# NOTE: Python's list.sort() was designed to be efficient in the
# presence of many duplicate elements. This isn't true of all
# sort functions in all languages or libraries, so this approach
# is more effective in Python than it may be elsewhere.
try:
t = sorted(s)
except TypeError:
pass # move on to the next method
else:
assert n > 0
last = t[0]
lasti = i = 1
while i < n:
if t[i] != last:
t[lasti] = last = t[i]
lasti = lasti + 1
i = i + 1
return t[:lasti]
del t
# Brute force is all that's left.
u = []
for x in s:
if x not in u:
u.append(x)
return u
# From Alex Martelli,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)
def uniquer(seq, idfun=None):
if idfun is None:
def idfun(x): return x
seen = {}
result = []
for item in seq:
marker = idfun(item)
# in old Python versions:
# if seen.has_key(marker)
# but in new ones:
if marker in seen: continue
seen[marker] = 1
result.append(item)
return result
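# idfun lets the caller decide what counts as a duplicate; a sketch:
#   uniquer(['Foo', 'foo', 'BAR'], idfun=str.lower)   -> ['Foo', 'BAR']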
# A more efficient implementation of Alex's uniquer(), this avoids the
# idfun() argument and function-call overhead by assuming that all
# items in the sequence are hashable.
def uniquer_hashables(seq):
seen = {}
result = []
for item in seq:
#if not item in seen:
if item not in seen:
seen[item] = 1
result.append(item)
return result
# Much of the logic here was originally based on recipe 4.9 from the
# Python CookBook, but we had to dumb it way down for Python 1.5.2.
class LogicalLines(object):
def __init__(self, fileobj):
self.fileobj = fileobj
def readline(self):
result = []
while True:
line = self.fileobj.readline()
if not line:
break
if line[-2:] == '\\\n':
result.append(line[:-2])
else:
result.append(line)
break
return ''.join(result)
def readlines(self):
result = []
while True:
line = self.readline()
if not line:
break
result.append(line)
return result
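# A file whose physical lines are "a \", "b" and "c" therefore reads back as
# the logical lines ['a b\n', 'c\n']. A sketch (the filename is made up):
#   lines = LogicalLines(open('some.conf')).readlines()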
class UniqueList(UserList):
def __init__(self, seq = []):
UserList.__init__(self, seq)
self.unique = True
def __make_unique(self):
if not self.unique:
self.data = uniquer_hashables(self.data)
self.unique = True
def __lt__(self, other):
self.__make_unique()
return UserList.__lt__(self, other)
def __le__(self, other):
self.__make_unique()
return UserList.__le__(self, other)
def __eq__(self, other):
self.__make_unique()
return UserList.__eq__(self, other)
def __ne__(self, other):
self.__make_unique()
return UserList.__ne__(self, other)
def __gt__(self, other):
self.__make_unique()
return UserList.__gt__(self, other)
def __ge__(self, other):
self.__make_unique()
return UserList.__ge__(self, other)
def __cmp__(self, other):
self.__make_unique()
return UserList.__cmp__(self, other)
def __len__(self):
self.__make_unique()
return UserList.__len__(self)
def __getitem__(self, i):
self.__make_unique()
return UserList.__getitem__(self, i)
def __setitem__(self, i, item):
UserList.__setitem__(self, i, item)
self.unique = False
def __getslice__(self, i, j):
self.__make_unique()
return UserList.__getslice__(self, i, j)
def __setslice__(self, i, j, other):
UserList.__setslice__(self, i, j, other)
self.unique = False
def __add__(self, other):
result = UserList.__add__(self, other)
result.unique = False
return result
def __radd__(self, other):
result = UserList.__radd__(self, other)
result.unique = False
return result
def __iadd__(self, other):
result = UserList.__iadd__(self, other)
result.unique = False
return result
def __mul__(self, other):
result = UserList.__mul__(self, other)
result.unique = False
return result
def __rmul__(self, other):
result = UserList.__rmul__(self, other)
result.unique = False
return result
def __imul__(self, other):
result = UserList.__imul__(self, other)
result.unique = False
return result
def append(self, item):
UserList.append(self, item)
self.unique = False
    def insert(self, i, item):
        UserList.insert(self, i, item)
self.unique = False
def count(self, item):
self.__make_unique()
return UserList.count(self, item)
def index(self, item):
self.__make_unique()
return UserList.index(self, item)
def reverse(self):
self.__make_unique()
UserList.reverse(self)
def sort(self, *args, **kwds):
self.__make_unique()
return UserList.sort(self, *args, **kwds)
def extend(self, other):
UserList.extend(self, other)
self.unique = False
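# Illustrative sketch (editor-added): UniqueList postpones de-duplication until
# the list is actually read, so writes stay cheap and duplicates disappear on
# access, preserving first-occurrence order. The flag strings are arbitrary.
def _unique_list_example():
    ul = UniqueList(['-O2', '-g'])
    ul.append('-O2')
    ul.append('-Wall')
    assert len(ul) == 3                    # reading triggers de-duplication
    assert ul[0] == '-O2' and ul[1] == '-g' and ul[2] == '-Wall'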
class Unbuffered(object):
"""
A proxy class that wraps a file object, flushing after every write,
and delegating everything else to the wrapped object.
"""
def __init__(self, file):
self.file = file
self.softspace = 0 ## backward compatibility; not supported in Py3k
def write(self, arg):
try:
self.file.write(arg)
self.file.flush()
except IOError:
# Stdout might be connected to a pipe that has been closed
# by now. The most likely reason for the pipe being closed
            # is that the user has pressed ctrl-c. If this is the case,
            # then SCons is currently shutting down. We therefore ignore
            # IOErrors here so that SCons can continue and shut down
# properly so that the .sconsign is correctly written
# before SCons exits.
pass
def __getattr__(self, attr):
return getattr(self.file, attr)
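# Illustrative sketch (editor-added): SCons wraps sys.stdout in Unbuffered so that
# progress lines appear immediately even when output is piped; any attribute that
# Unbuffered does not define is delegated to the wrapped file via __getattr__.
def _unbuffered_example():
    import sys
    out = Unbuffered(sys.stdout)
    out.write("scons: Building targets ...\n")   # written and flushed at once
    out.isatty()                                  # delegated to the real sys.stdout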
def make_path_relative(path):
""" makes an absolute path name to a relative pathname.
"""
if os.path.isabs(path):
drive_s,path = os.path.splitdrive(path)
import re
if not drive_s:
path=re.compile("/*(.*)").findall(path)[0]
else:
path=path[1:]
assert( not os.path.isabs( path ) ), path
return path
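# Illustrative sketch (editor-added): make_path_relative only strips the drive and
# any leading separators; it does not resolve the path against a base directory.
def _make_path_relative_example():
    import os.path
    if os.path.isabs('/usr/include'):             # true on POSIX systems
        assert make_path_relative('/usr/include') == 'usr/include'
    assert make_path_relative('build/obj') == 'build/obj'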
# The original idea for AddMethod() and RenameFunction() come from the
# following post to the ActiveState Python Cookbook:
#
# ASPN: Python Cookbook : Install bound methods in an instance
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/223613
#
# That code was a little fragile, though, so the following changes
# have been wrung on it:
#
# * Switched the installmethod() "object" and "function" arguments,
# so the order reflects that the left-hand side is the thing being
# "assigned to" and the right-hand side is the value being assigned.
#
# * Changed explicit type-checking to the "try: klass = object.__class__"
# block in installmethod() below so that it still works with the
# old-style classes that SCons uses.
#
# * Replaced the by-hand creation of methods and functions with use of
# the "new" module, as alluded to in Alex Martelli's response to the
# following Cookbook post:
#
# ASPN: Python Cookbook : Dynamically added methods to a class
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732
def AddMethod(obj, function, name=None):
"""
Adds either a bound method to an instance or an unbound method to
    a class. If name is omitted, the name of the specified function
is used by default.
Example:
a = A()
def f(self, x, y):
self.z = x + y
AddMethod(f, A, "add")
a.add(2, 4)
print a.z
    AddMethod(a, lambda self, i: self.l[i], "listIndex")
print a.listIndex(5)
"""
if name is None:
name = function.func_name
else:
function = RenameFunction(function, name)
if hasattr(obj, '__class__') and obj.__class__ is not type:
# "obj" is an instance, so it gets a bound method.
setattr(obj, name, MethodType(function, obj, obj.__class__))
else:
# "obj" is a class, so it gets an unbound method.
setattr(obj, name, MethodType(function, None, obj))
def RenameFunction(function, name):
"""
Returns a function identical to the specified function, but with
the specified name.
"""
return FunctionType(function.func_code,
function.func_globals,
name,
function.func_defaults)
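# Illustrative sketch (editor-added): AddMethod() as used on SCons Environment-like
# objects. This module targets Python 2 (func_name, unbound MethodType), so the
# sketch assumes that interpreter; the class and method names here are made up.
def _add_method_example():
    class Environment(object):
        pass
    def Dump(self):
        return self.__class__.__name__
    env = Environment()
    AddMethod(env, Dump)                      # bound method on this one instance
    AddMethod(Environment, Dump, 'DumpName')  # unbound method on the class itself
    assert env.Dump() == 'Environment'
    assert Environment().DumpName() == 'Environment'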
md5 = False
def MD5signature(s):
return str(s)
def MD5filesignature(fname, chunksize=65536):
f = open(fname, "rb")
result = f.read()
f.close()
return result
try:
import hashlib
except ImportError:
pass
else:
if hasattr(hashlib, 'md5'):
md5 = True
def MD5signature(s):
m = hashlib.md5()
m.update(str(s))
return m.hexdigest()
def MD5filesignature(fname, chunksize=65536):
m = hashlib.md5()
f = open(fname, "rb")
while True:
blck = f.read(chunksize)
if not blck:
break
m.update(str(blck))
f.close()
return m.hexdigest()
def MD5collect(signatures):
"""
Collects a list of signatures into an aggregate signature.
signatures - a list of signatures
returns - the aggregate signature
"""
if len(signatures) == 1:
return signatures[0]
else:
return MD5signature(', '.join(signatures))
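# Illustrative sketch (editor-added): content signatures are hex digest strings and
# MD5collect() folds a list of them into one aggregate signature (Python 2, matching
# this module). The strings below are arbitrary stand-ins for command lines/contents.
def _signature_example():
    sigs = [MD5signature('gcc -O2 -c hello.c'), MD5signature('int main() {}')]
    assert MD5collect(sigs[:1]) == sigs[0]                  # one signature passes through
    assert MD5collect(sigs) == MD5signature(', '.join(sigs))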
def silent_intern(x):
"""
Perform sys.intern() on the passed argument and return the result.
If the input is ineligible (e.g. a unicode string) the original argument is
returned and no exception is thrown.
"""
try:
return sys.intern(x)
except TypeError:
return x
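# Illustrative sketch (editor-added): interning is purely an optimization; the
# returned string always compares equal to the input. Assumes an interpreter that
# provides sys.intern (Python 3); sharing one object is a CPython detail.
def _silent_intern_example():
    a = silent_intern("LIBPATH")
    b = silent_intern("LIBPATH")
    assert a == b
    assert a is b   # CPython: interned strings are shared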
# From Dinu C. Gherman,
# Python Cookbook, second edition, recipe 6.17, p. 277.
# Also:
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205
# ASPN: Python Cookbook: Null Object Design Pattern
#TODO??? class Null(object):
class Null(object):
""" Null objects always and reliably "do nothing." """
def __new__(cls, *args, **kwargs):
if not '_instance' in vars(cls):
cls._instance = super(Null, cls).__new__(cls, *args, **kwargs)
return cls._instance
def __init__(self, *args, **kwargs):
pass
def __call__(self, *args, **kwargs):
return self
def __repr__(self):
return "Null(0x%08X)" % id(self)
def __nonzero__(self):
return False
def __getattr__(self, name):
return self
def __setattr__(self, name, value):
return self
def __delattr__(self, name):
return self
class NullSeq(Null):
def __len__(self):
return 0
def __iter__(self):
return iter(())
def __getitem__(self, i):
return self
def __delitem__(self, i):
return self
def __setitem__(self, i, v):
return self
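# Illustrative sketch (editor-added): the Null object absorbs any attribute access,
# call, or assignment, so an optional collaborator (a logger here) can be replaced
# by a single shared no-op instance.
def _null_example():
    logger = Null()
    logger.debug("never formatted").flush()   # every lookup/call returns the Null
    assert Null() is logger                    # Null behaves as a singleton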
del __revision__
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 | 5,895,460,004,294,275,000 | 31.764706 | 96 | 0.598294 | false |
huijunwu/heron | heron/tools/cli/tests/python/opts_unittest.py | 5 | 2120 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
''' opts_unittest.py '''
import unittest2 as unittest
import heron.tools.cli.src.python.opts as opts
#pylint: disable=missing-docstring, no-self-use
class OptsTest(unittest.TestCase):
def setUp(self):
pass
def test_one_opt(self):
opts.clear_config()
opts.set_config('XX', 'xx')
self.assertEqual('xx', opts.get_config('XX'))
def test_two_opts(self):
opts.clear_config()
opts.set_config('XX', 'xx')
opts.set_config('YY', 'yy')
self.assertEqual('xx', opts.get_config('XX'))
self.assertEqual('yy', opts.get_config('YY'))
def test_non_exist_key(self):
opts.clear_config()
opts.set_config('XX', 'xx')
self.assertEqual(None, opts.get_config('YY'))
def test_many_opts(self):
opts.clear_config()
for k in range(1, 100):
key = "key-%d" % (k)
value = "value-%d" % (k)
opts.set_config(key, value)
for k in range(1, 100):
key = "key-%d" % (k)
value = "value-%d" % (k)
self.assertEqual(value, opts.get_config(key))
def test_clear_opts(self):
opts.clear_config()
opts.set_config('YY', 'yy')
self.assertEqual('yy', opts.get_config('YY'))
opts.clear_config()
self.assertEqual(None, opts.get_config('YY'))
def tearDown(self):
opts.clear_config()
| apache-2.0 | -8,596,834,868,011,118,000 | 30.176471 | 63 | 0.668868 | false |
sometallgit/AutoUploader | Python27/Lib/site-packages/rsa/cli.py | 81 | 12185 | # -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. StΓΌvel <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Commandline scripts.
These scripts are called by the executables defined in setup.py.
"""
from __future__ import with_statement, print_function
import abc
import sys
from optparse import OptionParser
import rsa
import rsa.bigfile
import rsa.pkcs1
HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys())
def keygen():
"""Key generator."""
# Parse the CLI options
parser = OptionParser(usage='usage: %prog [options] keysize',
description='Generates a new RSA keypair of "keysize" bits.')
parser.add_option('--pubout', type='string',
help='Output filename for the public key. The public key is '
'not saved if this option is not present. You can use '
'pyrsa-priv2pub to create the public key file later.')
parser.add_option('-o', '--out', type='string',
help='Output filename for the private key. The key is '
'written to stdout if this option is not present.')
parser.add_option('--form',
help='key format of the private and public keys - default PEM',
choices=('PEM', 'DER'), default='PEM')
(cli, cli_args) = parser.parse_args(sys.argv[1:])
if len(cli_args) != 1:
parser.print_help()
raise SystemExit(1)
try:
keysize = int(cli_args[0])
except ValueError:
parser.print_help()
print('Not a valid number: %s' % cli_args[0], file=sys.stderr)
raise SystemExit(1)
print('Generating %i-bit key' % keysize, file=sys.stderr)
(pub_key, priv_key) = rsa.newkeys(keysize)
# Save public key
if cli.pubout:
print('Writing public key to %s' % cli.pubout, file=sys.stderr)
data = pub_key.save_pkcs1(format=cli.form)
with open(cli.pubout, 'wb') as outfile:
outfile.write(data)
# Save private key
data = priv_key.save_pkcs1(format=cli.form)
if cli.out:
print('Writing private key to %s' % cli.out, file=sys.stderr)
with open(cli.out, 'wb') as outfile:
outfile.write(data)
else:
print('Writing private key to stdout', file=sys.stderr)
sys.stdout.write(data)
class CryptoOperation(object):
"""CLI callable that operates with input, output, and a key."""
__metaclass__ = abc.ABCMeta
keyname = 'public' # or 'private'
usage = 'usage: %%prog [options] %(keyname)s_key'
description = None
operation = 'decrypt'
operation_past = 'decrypted'
operation_progressive = 'decrypting'
input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \
'not specified.'
output_help = 'Name of the file to write the %(operation_past)s file ' \
'to. Written to stdout if this option is not present.'
expected_cli_args = 1
has_output = True
key_class = rsa.PublicKey
def __init__(self):
self.usage = self.usage % self.__class__.__dict__
self.input_help = self.input_help % self.__class__.__dict__
self.output_help = self.output_help % self.__class__.__dict__
@abc.abstractmethod
def perform_operation(self, indata, key, cli_args=None):
"""Performs the program's operation.
Implement in a subclass.
:returns: the data to write to the output.
"""
def __call__(self):
"""Runs the program."""
(cli, cli_args) = self.parse_cli()
key = self.read_key(cli_args[0], cli.keyform)
indata = self.read_infile(cli.input)
print(self.operation_progressive.title(), file=sys.stderr)
outdata = self.perform_operation(indata, key, cli_args)
if self.has_output:
self.write_outfile(outdata, cli.output)
def parse_cli(self):
"""Parse the CLI options
:returns: (cli_opts, cli_args)
"""
parser = OptionParser(usage=self.usage, description=self.description)
parser.add_option('-i', '--input', type='string', help=self.input_help)
if self.has_output:
parser.add_option('-o', '--output', type='string', help=self.output_help)
parser.add_option('--keyform',
help='Key format of the %s key - default PEM' % self.keyname,
choices=('PEM', 'DER'), default='PEM')
(cli, cli_args) = parser.parse_args(sys.argv[1:])
if len(cli_args) != self.expected_cli_args:
parser.print_help()
raise SystemExit(1)
return cli, cli_args
def read_key(self, filename, keyform):
"""Reads a public or private key."""
print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr)
with open(filename, 'rb') as keyfile:
keydata = keyfile.read()
return self.key_class.load_pkcs1(keydata, keyform)
def read_infile(self, inname):
"""Read the input file"""
if inname:
print('Reading input from %s' % inname, file=sys.stderr)
with open(inname, 'rb') as infile:
return infile.read()
print('Reading input from stdin', file=sys.stderr)
return sys.stdin.read()
def write_outfile(self, outdata, outname):
"""Write the output file"""
if outname:
print('Writing output to %s' % outname, file=sys.stderr)
with open(outname, 'wb') as outfile:
outfile.write(outdata)
else:
print('Writing output to stdout', file=sys.stderr)
sys.stdout.write(outdata)
class EncryptOperation(CryptoOperation):
"""Encrypts a file."""
keyname = 'public'
description = ('Encrypts a file. The file must be shorter than the key '
'length in order to be encrypted. For larger files, use the '
'pyrsa-encrypt-bigfile command.')
operation = 'encrypt'
operation_past = 'encrypted'
operation_progressive = 'encrypting'
def perform_operation(self, indata, pub_key, cli_args=None):
"""Encrypts files."""
return rsa.encrypt(indata, pub_key)
class DecryptOperation(CryptoOperation):
"""Decrypts a file."""
keyname = 'private'
description = ('Decrypts a file. The original file must be shorter than '
'the key length in order to have been encrypted. For larger '
'files, use the pyrsa-decrypt-bigfile command.')
operation = 'decrypt'
operation_past = 'decrypted'
operation_progressive = 'decrypting'
key_class = rsa.PrivateKey
def perform_operation(self, indata, priv_key, cli_args=None):
"""Decrypts files."""
return rsa.decrypt(indata, priv_key)
class SignOperation(CryptoOperation):
"""Signs a file."""
keyname = 'private'
usage = 'usage: %%prog [options] private_key hash_method'
description = ('Signs a file, outputs the signature. Choose the hash '
'method from %s' % ', '.join(HASH_METHODS))
operation = 'sign'
operation_past = 'signature'
operation_progressive = 'Signing'
key_class = rsa.PrivateKey
expected_cli_args = 2
output_help = ('Name of the file to write the signature to. Written '
'to stdout if this option is not present.')
def perform_operation(self, indata, priv_key, cli_args):
"""Signs files."""
hash_method = cli_args[1]
if hash_method not in HASH_METHODS:
raise SystemExit('Invalid hash method, choose one of %s' %
', '.join(HASH_METHODS))
return rsa.sign(indata, priv_key, hash_method)
class VerifyOperation(CryptoOperation):
"""Verify a signature."""
keyname = 'public'
usage = 'usage: %%prog [options] public_key signature_file'
description = ('Verifies a signature, exits with status 0 upon success, '
'prints an error message and exits with status 1 upon error.')
operation = 'verify'
operation_past = 'verified'
operation_progressive = 'Verifying'
key_class = rsa.PublicKey
expected_cli_args = 2
has_output = False
def perform_operation(self, indata, pub_key, cli_args):
"""Verifies files."""
signature_file = cli_args[1]
with open(signature_file, 'rb') as sigfile:
signature = sigfile.read()
try:
rsa.verify(indata, signature, pub_key)
except rsa.VerificationError:
raise SystemExit('Verification failed.')
print('Verification OK', file=sys.stderr)
class BigfileOperation(CryptoOperation):
"""CryptoOperation that doesn't read the entire file into memory."""
def __init__(self):
CryptoOperation.__init__(self)
self.file_objects = []
def __del__(self):
"""Closes any open file handles."""
for fobj in self.file_objects:
fobj.close()
def __call__(self):
"""Runs the program."""
(cli, cli_args) = self.parse_cli()
key = self.read_key(cli_args[0], cli.keyform)
# Get the file handles
infile = self.get_infile(cli.input)
outfile = self.get_outfile(cli.output)
# Call the operation
print(self.operation_progressive.title(), file=sys.stderr)
self.perform_operation(infile, outfile, key, cli_args)
def get_infile(self, inname):
"""Returns the input file object"""
if inname:
print('Reading input from %s' % inname, file=sys.stderr)
fobj = open(inname, 'rb')
self.file_objects.append(fobj)
else:
print('Reading input from stdin', file=sys.stderr)
fobj = sys.stdin
return fobj
def get_outfile(self, outname):
"""Returns the output file object"""
if outname:
print('Will write output to %s' % outname, file=sys.stderr)
fobj = open(outname, 'wb')
self.file_objects.append(fobj)
else:
print('Will write output to stdout', file=sys.stderr)
fobj = sys.stdout
return fobj
class EncryptBigfileOperation(BigfileOperation):
"""Encrypts a file to VARBLOCK format."""
keyname = 'public'
description = ('Encrypts a file to an encrypted VARBLOCK file. The file '
'can be larger than the key length, but the output file is only '
'compatible with Python-RSA.')
operation = 'encrypt'
operation_past = 'encrypted'
operation_progressive = 'encrypting'
def perform_operation(self, infile, outfile, pub_key, cli_args=None):
"""Encrypts files to VARBLOCK."""
return rsa.bigfile.encrypt_bigfile(infile, outfile, pub_key)
class DecryptBigfileOperation(BigfileOperation):
"""Decrypts a file in VARBLOCK format."""
keyname = 'private'
description = ('Decrypts an encrypted VARBLOCK file that was encrypted '
'with pyrsa-encrypt-bigfile')
operation = 'decrypt'
operation_past = 'decrypted'
operation_progressive = 'decrypting'
key_class = rsa.PrivateKey
def perform_operation(self, infile, outfile, priv_key, cli_args=None):
"""Decrypts a VARBLOCK file."""
return rsa.bigfile.decrypt_bigfile(infile, outfile, priv_key)
encrypt = EncryptOperation()
decrypt = DecryptOperation()
sign = SignOperation()
verify = VerifyOperation()
encrypt_bigfile = EncryptBigfileOperation()
decrypt_bigfile = DecryptBigfileOperation()
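# Illustrative sketch (editor-added): the callables above are what the setup.py
# console scripts invoke. Calling one directly behaves exactly like the command
# line; the key size and file names below are made-up examples.
def _keygen_example():
    import sys
    sys.argv = ['pyrsa-keygen', '--pubout', 'pub.pem', '-o', 'priv.pem', '512']
    keygen()   # writes priv.pem and pub.pem, printing progress to stderr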
| mit | -5,076,724,540,771,653,000 | 30.81201 | 87 | 0.610309 | false |
ptkool/spark | examples/src/main/python/ml/polynomial_expansion_example.py | 123 | 1522 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.feature import PolynomialExpansion
from pyspark.ml.linalg import Vectors
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
spark = SparkSession\
.builder\
.appName("PolynomialExpansionExample")\
.getOrCreate()
# $example on$
df = spark.createDataFrame([
(Vectors.dense([2.0, 1.0]),),
(Vectors.dense([0.0, 0.0]),),
(Vectors.dense([3.0, -1.0]),)
], ["features"])
polyExpansion = PolynomialExpansion(degree=3, inputCol="features", outputCol="polyFeatures")
polyDF = polyExpansion.transform(df)
polyDF.show(truncate=False)
# $example off$
spark.stop()
| apache-2.0 | -1,349,623,178,291,959,000 | 32.822222 | 96 | 0.707622 | false |
michalbe/servo | src/components/script/dom/bindings/codegen/parser/tests/test_special_method_signature_mismatch.py | 241 | 6622 | def WebIDLTest(parser, harness):
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch1 {
getter long long foo(long index);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch2 {
getter void foo(unsigned long index);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch3 {
getter boolean foo(unsigned long index, boolean extraArg);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch4 {
getter boolean foo(unsigned long... index);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch5 {
getter boolean foo(optional unsigned long index);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch6 {
getter boolean foo();
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch7 {
deleter long long foo(long index);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch9 {
deleter boolean foo(unsigned long index, boolean extraArg);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch10 {
deleter boolean foo(unsigned long... index);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch11 {
deleter boolean foo(optional unsigned long index);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch12 {
deleter boolean foo();
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch13 {
setter long long foo(long index, long long value);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch15 {
setter boolean foo(unsigned long index, boolean value, long long extraArg);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch16 {
setter boolean foo(unsigned long index, boolean... value);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch17 {
setter boolean foo(unsigned long index, optional boolean value);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch18 {
setter boolean foo();
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch20 {
creator long long foo(long index, long long value);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch22 {
creator boolean foo(unsigned long index, boolean value, long long extraArg);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch23 {
creator boolean foo(unsigned long index, boolean... value);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch24 {
creator boolean foo(unsigned long index, optional boolean value);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
threw = False
try:
parser.parse("""
interface SpecialMethodSignatureMismatch25 {
creator boolean foo();
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
| mpl-2.0 | -8,883,394,811,099,408,000 | 21.52381 | 90 | 0.532014 | false |
jward6/academics-python | datastructures/linkedStackTest.py | 1 | 1812 | import unittest
class Empty(Exception):
pass
class Stack:
class _Node:
__slots__ = '_element', '_next'
def __init__(self, e, n):
self._element = e
self._next = n
def __init__(self):
self._head = self._Node(None, None)
self._length = 0
def __len__(self):
return self._length
def is_empty(self):
return self._length == 0
def top(self):
if self.is_empty():
raise Empty('Stack is empty')
return self._head._next._element
def push(self, e):
n = self._Node(e, self._head._next)
self._head._next = n
self._length += 1
def pop(self):
if self.is_empty():
raise Empty('Stack is empty')
n = self._head._next
self._head._next = n._next
self._length -= 1
return n._element
def __iter__(self):
if self.is_empty():
raise Empty('Stack is empty')
n = self._head._next
while(n != None):
yield n._element
n = n._next
class StackTests(unittest.TestCase):
def test_push(self):
s = Stack()
s.push(15)
s.push(20)
self.assertEqual(len(s), 2)
def test_top(self):
s = Stack()
s.push(15)
s.push(20)
self.assertEqual(s.top(), 20)
def test_pop(self):
s = Stack()
s.push(15)
s.push(20)
self.assertEqual(s.pop(), 20)
self.assertEqual(len(s), 1)
self.assertEqual(s.pop(), 15)
def test_iter(self):
s = Stack()
s.push(15)
s.push(20)
s.push(25)
val = 25
for e in s:
self.assertEqual(e, val)
val -= 5
if __name__ == '__main__':
unittest.main()
| mit | 4,390,032,770,302,519,000 | 18.276596 | 43 | 0.475166 | false |
ChrisTruncer/rdpy | rdpy/security/pyDes.py | 2 | 32292 | #############################################################################
# Documentation #
#############################################################################
# Author: Todd Whiteman
# Date: 16th March, 2009
# Version: 2.0.0
# License: Public Domain - free to do as you wish
# Homepage: http://twhiteman.netfirms.com/des.html
#
# This is a pure python implementation of the DES encryption algorithm.
# It's pure python to avoid portability issues, since most DES
# implementations are programmed in C (for performance reasons).
#
# Triple DES class is also implemented, utilising the DES base. Triple DES
# is either DES-EDE3 with a 24 byte key, or DES-EDE2 with a 16 byte key.
#
# See the README.txt that should come with this python module for the
# implementation methods used.
#
# Thanks to:
# * David Broadwell for ideas, comments and suggestions.
# * Mario Wolff for pointing out and debugging some triple des CBC errors.
# * Santiago Palladino for providing the PKCS5 padding technique.
# * Shaya for correcting the PAD_PKCS5 triple des CBC errors.
#
"""A pure python implementation of the DES and TRIPLE DES encryption algorithms.
Class initialization
--------------------
pyDes.des(key, [mode], [IV], [pad], [padmode])
pyDes.triple_des(key, [mode], [IV], [pad], [padmode])
key -> Bytes containing the encryption key. 8 bytes for DES, 16 or 24 bytes
for Triple DES
mode -> Optional argument for encryption type, can be either
pyDes.ECB (Electronic Code Book) or pyDes.CBC (Cypher Block Chaining)
IV -> Optional Initial Value bytes, must be supplied if using CBC mode.
Length must be 8 bytes.
pad -> Optional argument, set the pad character (PAD_NORMAL) to use during
        all encrypt/decrypt operations done with this instance.
padmode -> Optional argument, set the padding mode (PAD_NORMAL or PAD_PKCS5)
        to use during all encrypt/decrypt operations done with this instance.
I recommend to use PAD_PKCS5 padding, as then you never need to worry about any
padding issues, as the padding can be removed unambiguously upon decrypting
data that was encrypted using PAD_PKCS5 padmode.
Common methods
--------------
encrypt(data, [pad], [padmode])
decrypt(data, [pad], [padmode])
data -> Bytes to be encrypted/decrypted
pad -> Optional argument. Only when using padmode of PAD_NORMAL. For
        encryption, adds these characters to the end of the data block when
data is not a multiple of 8 bytes. For decryption, will remove the
trailing characters that match this pad character from the last 8
bytes of the unencrypted data block.
padmode -> Optional argument, set the padding mode, must be one of PAD_NORMAL
or PAD_PKCS5). Defaults to PAD_NORMAL.
Example
-------
from pyDes import *
data = "Please encrypt my data"
k = des("DESCRYPT", CBC, "\0\0\0\0\0\0\0\0", pad=None, padmode=PAD_PKCS5)
# For Python3, you'll need to use bytes, i.e.:
# data = b"Please encrypt my data"
# k = des(b"DESCRYPT", CBC, b"\0\0\0\0\0\0\0\0", pad=None, padmode=PAD_PKCS5)
d = k.encrypt(data)
print "Encrypted: %r" % d
print "Decrypted: %r" % k.decrypt(d)
assert k.decrypt(d, padmode=PAD_PKCS5) == data
See the module source (pyDes.py) for more examples of use.
You can also run the pyDes.py file without any arguments to see a simple test.
Note: This code was not written for high-end systems needing a fast
implementation, but rather a handy portable solution with small usage.
"""
import sys
# _pythonMajorVersion is used to handle Python2 and Python3 differences.
_pythonMajorVersion = sys.version_info[0]
# Modes of crypting / cyphering
ECB = 0
CBC = 1
# Modes of padding
PAD_NORMAL = 1
PAD_PKCS5 = 2
# PAD_PKCS5: is a method that will unambiguously remove all padding
# characters after decryption, when originally encrypted with
# this padding mode.
# For a good description of the PKCS5 padding technique, see:
# http://www.faqs.org/rfcs/rfc1423.html
# The base class shared by des and triple des.
class _baseDes(object):
def __init__(self, mode=ECB, IV=None, pad=None, padmode=PAD_NORMAL):
if IV:
IV = self._guardAgainstUnicode(IV)
if pad:
pad = self._guardAgainstUnicode(pad)
self.block_size = 8
# Sanity checking of arguments.
if pad and padmode == PAD_PKCS5:
raise ValueError("Cannot use a pad character with PAD_PKCS5")
if IV and len(IV) != self.block_size:
raise ValueError("Invalid Initial Value (IV), must be a multiple of " + str(self.block_size) + " bytes")
# Set the passed in variables
self._mode = mode
self._iv = IV
self._padding = pad
self._padmode = padmode
def getKey(self):
"""getKey() -> bytes"""
return self.__key
def setKey(self, key):
"""Will set the crypting key for this object."""
key = self._guardAgainstUnicode(key)
self.__key = key
def getMode(self):
"""getMode() -> pyDes.ECB or pyDes.CBC"""
return self._mode
def setMode(self, mode):
"""Sets the type of crypting mode, pyDes.ECB or pyDes.CBC"""
self._mode = mode
def getPadding(self):
"""getPadding() -> bytes of length 1. Padding character."""
return self._padding
def setPadding(self, pad):
"""setPadding() -> bytes of length 1. Padding character."""
if pad is not None:
pad = self._guardAgainstUnicode(pad)
self._padding = pad
def getPadMode(self):
"""getPadMode() -> pyDes.PAD_NORMAL or pyDes.PAD_PKCS5"""
return self._padmode
def setPadMode(self, mode):
"""Sets the type of padding mode, pyDes.PAD_NORMAL or pyDes.PAD_PKCS5"""
self._padmode = mode
def getIV(self):
"""getIV() -> bytes"""
return self._iv
def setIV(self, IV):
"""Will set the Initial Value, used in conjunction with CBC mode"""
if not IV or len(IV) != self.block_size:
raise ValueError("Invalid Initial Value (IV), must be a multiple of " + str(self.block_size) + " bytes")
IV = self._guardAgainstUnicode(IV)
self._iv = IV
def _padData(self, data, pad, padmode):
# Pad data depending on the mode
if padmode is None:
# Get the default padding mode.
padmode = self.getPadMode()
if pad and padmode == PAD_PKCS5:
raise ValueError("Cannot use a pad character with PAD_PKCS5")
if padmode == PAD_NORMAL:
if len(data) % self.block_size == 0:
# No padding required.
return data
if not pad:
# Get the default padding.
pad = self.getPadding()
if not pad:
raise ValueError("Data must be a multiple of " + str(self.block_size) + " bytes in length. Use padmode=PAD_PKCS5 or set the pad character.")
data += (self.block_size - (len(data) % self.block_size)) * pad
elif padmode == PAD_PKCS5:
pad_len = 8 - (len(data) % self.block_size)
if _pythonMajorVersion < 3:
data += pad_len * chr(pad_len)
else:
data += bytes([pad_len] * pad_len)
return data
def _unpadData(self, data, pad, padmode):
# Unpad data depending on the mode.
if not data:
return data
if pad and padmode == PAD_PKCS5:
raise ValueError("Cannot use a pad character with PAD_PKCS5")
if padmode is None:
# Get the default padding mode.
padmode = self.getPadMode()
if padmode == PAD_NORMAL:
if not pad:
# Get the default padding.
pad = self.getPadding()
if pad:
data = data[:-self.block_size] + \
data[-self.block_size:].rstrip(pad)
elif padmode == PAD_PKCS5:
if _pythonMajorVersion < 3:
pad_len = ord(data[-1])
else:
pad_len = data[-1]
data = data[:-pad_len]
return data
def _guardAgainstUnicode(self, data):
# Only accept byte strings or ascii unicode values, otherwise
# there is no way to correctly decode the data into bytes.
if _pythonMajorVersion < 3:
if isinstance(data, unicode):
raise ValueError("pyDes can only work with bytes, not Unicode strings.")
else:
if isinstance(data, str):
# Only accept ascii unicode values.
try:
return data.encode('ascii')
except UnicodeEncodeError:
pass
raise ValueError("pyDes can only work with encoded strings, not Unicode.")
return data
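# Illustrative sketch (editor-added): the PKCS5 rule used by _padData()/_unpadData()
# appends N copies of the byte value N, where N = 8 - (len(data) % 8), which is why
# unpadding never needs a separate pad character. Mirrors the Python 2 branch above.
def _pkcs5_rule_example():
    data = "HELLO"                        # 5 bytes -> needs 3 bytes of padding
    pad_len = 8 - (len(data) % 8)
    padded = data + chr(pad_len) * pad_len
    assert padded == "HELLO\x03\x03\x03" and len(padded) % 8 == 0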
#############################################################################
# DES #
#############################################################################
class des(_baseDes):
"""DES encryption/decrytpion class
Supports ECB (Electronic Code Book) and CBC (Cypher Block Chaining) modes.
pyDes.des(key,[mode], [IV])
key -> Bytes containing the encryption key, must be exactly 8 bytes
mode -> Optional argument for encryption type, can be either pyDes.ECB
(Electronic Code Book), pyDes.CBC (Cypher Block Chaining)
IV -> Optional Initial Value bytes, must be supplied if using CBC mode.
Must be 8 bytes in length.
pad -> Optional argument, set the pad character (PAD_NORMAL) to use
               during all encrypt/decrypt operations done with this instance.
padmode -> Optional argument, set the padding mode (PAD_NORMAL or
               PAD_PKCS5) to use during all encrypt/decrypt operations done
with this instance.
"""
# Permutation and translation tables for DES
__pc1 = [56, 48, 40, 32, 24, 16, 8,
0, 57, 49, 41, 33, 25, 17,
9, 1, 58, 50, 42, 34, 26,
18, 10, 2, 59, 51, 43, 35,
62, 54, 46, 38, 30, 22, 14,
6, 61, 53, 45, 37, 29, 21,
13, 5, 60, 52, 44, 36, 28,
20, 12, 4, 27, 19, 11, 3
]
# number left rotations of pc1
__left_rotations = [
1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1
]
# permuted choice key (table 2)
__pc2 = [
13, 16, 10, 23, 0, 4,
2, 27, 14, 5, 20, 9,
22, 18, 11, 3, 25, 7,
15, 6, 26, 19, 12, 1,
40, 51, 30, 36, 46, 54,
29, 39, 50, 44, 32, 47,
43, 48, 38, 55, 33, 52,
45, 41, 49, 35, 28, 31
]
# initial permutation IP
__ip = [57, 49, 41, 33, 25, 17, 9, 1,
59, 51, 43, 35, 27, 19, 11, 3,
61, 53, 45, 37, 29, 21, 13, 5,
63, 55, 47, 39, 31, 23, 15, 7,
56, 48, 40, 32, 24, 16, 8, 0,
58, 50, 42, 34, 26, 18, 10, 2,
60, 52, 44, 36, 28, 20, 12, 4,
62, 54, 46, 38, 30, 22, 14, 6
]
# Expansion table for turning 32 bit blocks into 48 bits
__expansion_table = [
31, 0, 1, 2, 3, 4,
3, 4, 5, 6, 7, 8,
7, 8, 9, 10, 11, 12,
11, 12, 13, 14, 15, 16,
15, 16, 17, 18, 19, 20,
19, 20, 21, 22, 23, 24,
23, 24, 25, 26, 27, 28,
27, 28, 29, 30, 31, 0
]
# The (in)famous S-boxes
__sbox = [
# S1
[14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7,
0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8,
4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0,
15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13],
# S2
[15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10,
3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5,
0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15,
13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9],
# S3
[10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8,
13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1,
13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7,
1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12],
# S4
[7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15,
13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9,
10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4,
3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14],
# S5
[2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9,
14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6,
4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14,
11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 3],
# S6
[12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11,
10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8,
9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6,
4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13],
# S7
[4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1,
13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6,
1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2,
6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12],
# S8
[13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7,
1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2,
7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8,
2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11],
]
# 32-bit permutation function P used on the output of the S-boxes
__p = [
15, 6, 19, 20, 28, 11,
27, 16, 0, 14, 22, 25,
4, 17, 30, 9, 1, 7,
        23, 13, 31, 26, 2, 8,
18, 12, 29, 5, 21, 10,
3, 24
]
# final permutation IP^-1
__fp = [
39, 7, 47, 15, 55, 23, 63, 31,
38, 6, 46, 14, 54, 22, 62, 30,
37, 5, 45, 13, 53, 21, 61, 29,
36, 4, 44, 12, 52, 20, 60, 28,
35, 3, 43, 11, 51, 19, 59, 27,
34, 2, 42, 10, 50, 18, 58, 26,
33, 1, 41, 9, 49, 17, 57, 25,
32, 0, 40, 8, 48, 16, 56, 24
]
# Type of crypting being done
ENCRYPT = 0x00
DECRYPT = 0x01
# Initialisation
def __init__(self, key, mode=ECB, IV=None, pad=None, padmode=PAD_NORMAL):
# Sanity checking of arguments.
if len(key) != 8:
raise ValueError("Invalid DES key size. Key must be exactly 8 bytes long.")
_baseDes.__init__(self, mode, IV, pad, padmode)
self.key_size = 8
self.L = []
self.R = []
self.Kn = [ [0] * 48 ] * 16 # 16 48-bit keys (K1 - K16)
self.final = []
self.setKey(key)
def setKey(self, key):
"""Will set the crypting key for this object. Must be 8 bytes."""
_baseDes.setKey(self, key)
self.__create_sub_keys()
def __String_to_BitList(self, data):
"""Turn the string data, into a list of bits (1, 0)'s"""
if _pythonMajorVersion < 3:
# Turn the strings into integers. Python 3 uses a bytes
# class, which already has this behaviour.
data = [ord(c) for c in data]
l = len(data) * 8
result = [0] * l
pos = 0
for ch in data:
i = 7
while i >= 0:
if ch & (1 << i) != 0:
result[pos] = 1
else:
result[pos] = 0
pos += 1
i -= 1
return result
def __BitList_to_String(self, data):
"""Turn the list of bits -> data, into a string"""
result = []
pos = 0
c = 0
while pos < len(data):
c += data[pos] << (7 - (pos % 8))
if (pos % 8) == 7:
result.append(c)
c = 0
pos += 1
if _pythonMajorVersion < 3:
return ''.join([ chr(c) for c in result ])
else:
return bytes(result)
def __permutate(self, table, block):
"""Permutate this block with the specified table"""
return list(map(lambda x: block[x], table))
# Transform the secret key, so that it is ready for data processing
# Create the 16 subkeys, K[1] - K[16]
def __create_sub_keys(self):
"""Create the 16 subkeys K[1] to K[16] from the given key"""
key = self.__permutate(des.__pc1, self.__String_to_BitList(self.getKey()))
i = 0
# Split into Left and Right sections
self.L = key[:28]
self.R = key[28:]
while i < 16:
j = 0
# Perform circular left shifts
while j < des.__left_rotations[i]:
self.L.append(self.L[0])
del self.L[0]
self.R.append(self.R[0])
del self.R[0]
j += 1
# Create one of the 16 subkeys through pc2 permutation
self.Kn[i] = self.__permutate(des.__pc2, self.L + self.R)
i += 1
# Main part of the encryption algorithm, the number cruncher :)
def __des_crypt(self, block, crypt_type):
"""Crypt the block of data through DES bit-manipulation"""
block = self.__permutate(des.__ip, block)
self.L = block[:32]
self.R = block[32:]
# Encryption starts from Kn[1] through to Kn[16]
if crypt_type == des.ENCRYPT:
iteration = 0
iteration_adjustment = 1
# Decryption starts from Kn[16] down to Kn[1]
else:
iteration = 15
iteration_adjustment = -1
i = 0
while i < 16:
# Make a copy of R[i-1], this will later become L[i]
tempR = self.R[:]
# Permutate R[i - 1] to start creating R[i]
self.R = self.__permutate(des.__expansion_table, self.R)
# Exclusive or R[i - 1] with K[i], create B[1] to B[8] whilst here
self.R = list(map(lambda x, y: x ^ y, self.R, self.Kn[iteration]))
B = [self.R[:6], self.R[6:12], self.R[12:18], self.R[18:24], self.R[24:30], self.R[30:36], self.R[36:42], self.R[42:]]
# Optimization: Replaced below commented code with above
#j = 0
#B = []
#while j < len(self.R):
# self.R[j] = self.R[j] ^ self.Kn[iteration][j]
# j += 1
# if j % 6 == 0:
# B.append(self.R[j-6:j])
# Permutate B[1] to B[8] using the S-Boxes
j = 0
Bn = [0] * 32
pos = 0
while j < 8:
# Work out the offsets
m = (B[j][0] << 1) + B[j][5]
n = (B[j][1] << 3) + (B[j][2] << 2) + (B[j][3] << 1) + B[j][4]
# Find the permutation value
v = des.__sbox[j][(m << 4) + n]
# Turn value into bits, add it to result: Bn
Bn[pos] = (v & 8) >> 3
Bn[pos + 1] = (v & 4) >> 2
Bn[pos + 2] = (v & 2) >> 1
Bn[pos + 3] = v & 1
pos += 4
j += 1
            # Permutate the concatenation of B[1] to B[8] (Bn)
self.R = self.__permutate(des.__p, Bn)
# Xor with L[i - 1]
self.R = list(map(lambda x, y: x ^ y, self.R, self.L))
# Optimization: This now replaces the below commented code
#j = 0
#while j < len(self.R):
# self.R[j] = self.R[j] ^ self.L[j]
# j += 1
# L[i] becomes R[i - 1]
self.L = tempR
i += 1
iteration += iteration_adjustment
# Final permutation of R[16]L[16]
self.final = self.__permutate(des.__fp, self.R + self.L)
return self.final
# Data to be encrypted/decrypted
def crypt(self, data, crypt_type):
"""Crypt the data in blocks, running it through des_crypt()"""
# Error check the data
if not data:
return ''
if len(data) % self.block_size != 0:
if crypt_type == des.DECRYPT: # Decryption must work on 8 byte blocks
raise ValueError("Invalid data length, data must be a multiple of " + str(self.block_size) + " bytes\n.")
if not self.getPadding():
raise ValueError("Invalid data length, data must be a multiple of " + str(self.block_size) + " bytes\n. Try setting the optional padding character")
else:
data += (self.block_size - (len(data) % self.block_size)) * self.getPadding()
# print "Len of data: %f" % (len(data) / self.block_size)
if self.getMode() == CBC:
if self.getIV():
iv = self.__String_to_BitList(self.getIV())
else:
raise ValueError("For CBC mode, you must supply the Initial Value (IV) for ciphering")
        # Split the data into blocks, crypting each one separately
i = 0
dict = {}
result = []
#cached = 0
#lines = 0
while i < len(data):
# Test code for caching encryption results
#lines += 1
#if dict.has_key(data[i:i+8]):
#print "Cached result for: %s" % data[i:i+8]
# cached += 1
# result.append(dict[data[i:i+8]])
# i += 8
# continue
block = self.__String_to_BitList(data[i:i+8])
# Xor with IV if using CBC mode
if self.getMode() == CBC:
if crypt_type == des.ENCRYPT:
block = list(map(lambda x, y: x ^ y, block, iv))
#j = 0
#while j < len(block):
# block[j] = block[j] ^ iv[j]
# j += 1
processed_block = self.__des_crypt(block, crypt_type)
if crypt_type == des.DECRYPT:
processed_block = list(map(lambda x, y: x ^ y, processed_block, iv))
#j = 0
#while j < len(processed_block):
# processed_block[j] = processed_block[j] ^ iv[j]
# j += 1
iv = block
else:
iv = processed_block
else:
processed_block = self.__des_crypt(block, crypt_type)
# Add the resulting crypted block to our list
#d = self.__BitList_to_String(processed_block)
#result.append(d)
result.append(self.__BitList_to_String(processed_block))
#dict[data[i:i+8]] = d
i += 8
# print "Lines: %d, cached: %d" % (lines, cached)
# Return the full crypted string
if _pythonMajorVersion < 3:
return ''.join(result)
else:
return bytes.fromhex('').join(result)
def encrypt(self, data, pad=None, padmode=None):
"""encrypt(data, [pad], [padmode]) -> bytes
data : Bytes to be encrypted
pad : Optional argument for encryption padding. Must only be one byte
padmode : Optional argument for overriding the padding mode.
The data must be a multiple of 8 bytes and will be encrypted
with the already specified key. Data does not have to be a
multiple of 8 bytes if the padding character is supplied, or
        the padmode is set to PAD_PKCS5, as bytes will then be added to
        ensure that the padded data is a multiple of 8 bytes.
"""
data = self._guardAgainstUnicode(data)
if pad is not None:
pad = self._guardAgainstUnicode(pad)
data = self._padData(data, pad, padmode)
return self.crypt(data, des.ENCRYPT)
def decrypt(self, data, pad=None, padmode=None):
"""decrypt(data, [pad], [padmode]) -> bytes
        data : Bytes to be decrypted
pad : Optional argument for decryption padding. Must only be one byte
padmode : Optional argument for overriding the padding mode.
The data must be a multiple of 8 bytes and will be decrypted
with the already specified key. In PAD_NORMAL mode, if the
optional padding character is supplied, then the un-encrypted
data will have the padding characters removed from the end of
the bytes. This pad removal only occurs on the last 8 bytes of
the data (last data block). In PAD_PKCS5 mode, the special
padding end markers will be removed from the data after decrypting.
"""
data = self._guardAgainstUnicode(data)
if pad is not None:
pad = self._guardAgainstUnicode(pad)
data = self.crypt(data, des.DECRYPT)
return self._unpadData(data, pad, padmode)
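# Illustrative sketch (editor-added): round-trip with single DES in CBC mode and
# PKCS5 padding, mirroring the example from the module docstring (Python 2 byte
# strings; under Python 3 the literals would need to be bytes).
def _des_roundtrip_example():
    k = des("DESCRYPT", CBC, "\0\0\0\0\0\0\0\0", pad=None, padmode=PAD_PKCS5)
    ciphertext = k.encrypt("Please encrypt my data")
    assert k.decrypt(ciphertext) == "Please encrypt my data"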
#############################################################################
# Triple DES #
#############################################################################
class triple_des(_baseDes):
"""Triple DES encryption/decrytpion class
This algorithm uses the DES-EDE3 (when a 24 byte key is supplied) or
the DES-EDE2 (when a 16 byte key is supplied) encryption methods.
Supports ECB (Electronic Code Book) and CBC (Cypher Block Chaining) modes.
pyDes.des(key, [mode], [IV])
key -> Bytes containing the encryption key, must be either 16 or
24 bytes long
mode -> Optional argument for encryption type, can be either pyDes.ECB
(Electronic Code Book), pyDes.CBC (Cypher Block Chaining)
IV -> Optional Initial Value bytes, must be supplied if using CBC mode.
Must be 8 bytes in length.
pad -> Optional argument, set the pad character (PAD_NORMAL) to use
               during all encrypt/decrypt operations done with this instance.
padmode -> Optional argument, set the padding mode (PAD_NORMAL or
               PAD_PKCS5) to use during all encrypt/decrypt operations done
with this instance.
"""
def __init__(self, key, mode=ECB, IV=None, pad=None, padmode=PAD_NORMAL):
_baseDes.__init__(self, mode, IV, pad, padmode)
self.setKey(key)
def setKey(self, key):
"""Will set the crypting key for this object. Either 16 or 24 bytes long."""
self.key_size = 24 # Use DES-EDE3 mode
if len(key) != self.key_size:
if len(key) == 16: # Use DES-EDE2 mode
self.key_size = 16
else:
raise ValueError("Invalid triple DES key size. Key must be either 16 or 24 bytes long")
if self.getMode() == CBC:
if not self.getIV():
# Use the first 8 bytes of the key
self._iv = key[:self.block_size]
if len(self.getIV()) != self.block_size:
raise ValueError("Invalid IV, must be 8 bytes in length")
self.__key1 = des(key[:8], self._mode, self._iv,
self._padding, self._padmode)
self.__key2 = des(key[8:16], self._mode, self._iv,
self._padding, self._padmode)
if self.key_size == 16:
self.__key3 = self.__key1
else:
self.__key3 = des(key[16:], self._mode, self._iv,
self._padding, self._padmode)
_baseDes.setKey(self, key)
# Override setter methods to work on all 3 keys.
def setMode(self, mode):
"""Sets the type of crypting mode, pyDes.ECB or pyDes.CBC"""
_baseDes.setMode(self, mode)
for key in (self.__key1, self.__key2, self.__key3):
key.setMode(mode)
def setPadding(self, pad):
"""setPadding() -> bytes of length 1. Padding character."""
_baseDes.setPadding(self, pad)
for key in (self.__key1, self.__key2, self.__key3):
key.setPadding(pad)
def setPadMode(self, mode):
"""Sets the type of padding mode, pyDes.PAD_NORMAL or pyDes.PAD_PKCS5"""
_baseDes.setPadMode(self, mode)
for key in (self.__key1, self.__key2, self.__key3):
key.setPadMode(mode)
def setIV(self, IV):
"""Will set the Initial Value, used in conjunction with CBC mode"""
_baseDes.setIV(self, IV)
for key in (self.__key1, self.__key2, self.__key3):
key.setIV(IV)
def encrypt(self, data, pad=None, padmode=None):
"""encrypt(data, [pad], [padmode]) -> bytes
data : bytes to be encrypted
pad : Optional argument for encryption padding. Must only be one byte
padmode : Optional argument for overriding the padding mode.
The data must be a multiple of 8 bytes and will be encrypted
with the already specified key. Data does not have to be a
multiple of 8 bytes if the padding character is supplied, or
        the padmode is set to PAD_PKCS5, as bytes will then be added to
        ensure that the padded data is a multiple of 8 bytes.
"""
ENCRYPT = des.ENCRYPT
DECRYPT = des.DECRYPT
data = self._guardAgainstUnicode(data)
if pad is not None:
pad = self._guardAgainstUnicode(pad)
# Pad the data accordingly.
data = self._padData(data, pad, padmode)
if self.getMode() == CBC:
self.__key1.setIV(self.getIV())
self.__key2.setIV(self.getIV())
self.__key3.setIV(self.getIV())
i = 0
result = []
while i < len(data):
block = self.__key1.crypt(data[i:i+8], ENCRYPT)
block = self.__key2.crypt(block, DECRYPT)
block = self.__key3.crypt(block, ENCRYPT)
self.__key1.setIV(block)
self.__key2.setIV(block)
self.__key3.setIV(block)
result.append(block)
i += 8
if _pythonMajorVersion < 3:
return ''.join(result)
else:
return bytes.fromhex('').join(result)
else:
data = self.__key1.crypt(data, ENCRYPT)
data = self.__key2.crypt(data, DECRYPT)
return self.__key3.crypt(data, ENCRYPT)
def decrypt(self, data, pad=None, padmode=None):
"""decrypt(data, [pad], [padmode]) -> bytes
        data : bytes to be decrypted
pad : Optional argument for decryption padding. Must only be one byte
padmode : Optional argument for overriding the padding mode.
The data must be a multiple of 8 bytes and will be decrypted
with the already specified key. In PAD_NORMAL mode, if the
optional padding character is supplied, then the un-encrypted
data will have the padding characters removed from the end of
the bytes. This pad removal only occurs on the last 8 bytes of
the data (last data block). In PAD_PKCS5 mode, the special
padding end markers will be removed from the data after
decrypting, no pad character is required for PAD_PKCS5.
"""
ENCRYPT = des.ENCRYPT
DECRYPT = des.DECRYPT
data = self._guardAgainstUnicode(data)
if pad is not None:
pad = self._guardAgainstUnicode(pad)
if self.getMode() == CBC:
self.__key1.setIV(self.getIV())
self.__key2.setIV(self.getIV())
self.__key3.setIV(self.getIV())
i = 0
result = []
while i < len(data):
iv = data[i:i+8]
block = self.__key3.crypt(iv, DECRYPT)
block = self.__key2.crypt(block, ENCRYPT)
block = self.__key1.crypt(block, DECRYPT)
self.__key1.setIV(iv)
self.__key2.setIV(iv)
self.__key3.setIV(iv)
result.append(block)
i += 8
if _pythonMajorVersion < 3:
data = ''.join(result)
else:
data = bytes.fromhex('').join(result)
else:
data = self.__key3.crypt(data, DECRYPT)
data = self.__key2.crypt(data, ENCRYPT)
data = self.__key1.crypt(data, DECRYPT)
return self._unpadData(data, pad, padmode) | gpl-3.0 | -6,038,914,622,644,025,000 | 36.902582 | 164 | 0.52592 | false |
ishay2b/tensorflow | tensorflow/python/kernel_tests/stack_op_test.py | 9 | 11444 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for Pack Op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
def np_split_squeeze(array, axis):
axis_len = array.shape[axis]
return [
np.squeeze(
arr, axis=(axis,)) for arr in np.split(
array, axis_len, axis=axis)
]
class StackOpTest(test.TestCase):
def testSimple(self):
np.random.seed(7)
with self.test_session(use_gpu=True):
for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2):
for dtype in [np.float32, np.int32, np.int64]:
data = np.random.randn(*shape).astype(dtype)
# Convert [data[0], data[1], ...] separately to tensorflow
# TODO(irving): Remove list() once we handle maps correctly
xs = list(map(constant_op.constant, data))
# Pack back into a single tensorflow tensor
c = array_ops.stack(xs)
self.assertAllEqual(c.eval(), data)
def testSimpleParallel(self):
np.random.seed(7)
with self.test_session(use_gpu=True):
for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2):
data = np.random.randn(*shape).astype(np.float32)
xs = list(map(constant_op.constant, data))
c = array_ops.parallel_stack(xs)
self.assertAllEqual(c.eval(), data)
def testConst(self):
np.random.seed(7)
with self.test_session(use_gpu=True):
for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2):
for dtype in [np.float32, np.int32, np.int64]:
data = np.random.randn(*shape).astype(dtype)
# Pack back into a single tensorflow tensor directly using np array
c = array_ops.stack(data)
# This is implemented via a Const:
self.assertEqual(c.op.type, "Const")
self.assertAllEqual(c.eval(), data)
# Python lists also work for 1-D case:
if len(shape) == 1:
data_list = list(data)
cl = array_ops.stack(data_list)
self.assertEqual(cl.op.type, "Const")
self.assertAllEqual(cl.eval(), data)
# Verify that shape induction works with shapes produced via const stack
a = constant_op.constant([1, 2, 3, 4, 5, 6])
b = array_ops.reshape(a, array_ops.stack([2, 3]))
self.assertAllEqual(b.get_shape(), [2, 3])
def testConstParallel(self):
np.random.seed(7)
with self.test_session(use_gpu=True):
for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2):
data = np.random.randn(*shape).astype(np.float32)
if len(shape) == 1:
data_list = list(data)
cl = array_ops.parallel_stack(data_list)
self.assertAllEqual(cl.eval(), data)
data = np.random.randn(*shape).astype(np.float32)
c = array_ops.parallel_stack(data)
self.assertAllEqual(c.eval(), data)
def testGradientsAxis0(self):
np.random.seed(7)
for shape in (2,), (3,), (2, 3), (3, 2), (4, 3, 2):
data = np.random.randn(*shape)
shapes = [shape[1:]] * shape[0]
with self.test_session(use_gpu=True):
# TODO(irving): Remove list() once we handle maps correctly
xs = list(map(constant_op.constant, data))
c = array_ops.stack(xs)
err = gradient_checker.compute_gradient_error(xs, shapes, c, shape)
self.assertLess(err, 1e-6)
def testGradientsAxis1(self):
np.random.seed(7)
for shape in (2, 3), (3, 2), (4, 3, 2):
data = np.random.randn(*shape)
shapes = [shape[1:]] * shape[0]
out_shape = list(shape[1:])
out_shape.insert(1, shape[0])
with self.test_session(use_gpu=True):
# TODO(irving): Remove list() once we handle maps correctly
xs = list(map(constant_op.constant, data))
c = array_ops.stack(xs, axis=1)
err = gradient_checker.compute_gradient_error(xs, shapes, c, out_shape)
self.assertLess(err, 1e-6)
def testZeroSize(self):
# Verify that stack doesn't crash for zero size inputs
with self.test_session(use_gpu=True):
for shape in (0,), (3, 0), (0, 3):
x = np.zeros((2,) + shape).astype(np.int32)
p = array_ops.stack(list(x)).eval()
self.assertAllEqual(p, x)
p = array_ops.parallel_stack(list(x)).eval()
self.assertAllEqual(p, x)
def testAxis0Default(self):
with self.test_session(use_gpu=True):
t = [constant_op.constant([1, 2, 3]), constant_op.constant([4, 5, 6])]
stacked = array_ops.stack(t).eval()
parallel_stacked = array_ops.parallel_stack(t).eval()
self.assertAllEqual(stacked, np.array([[1, 2, 3], [4, 5, 6]]))
self.assertAllEqual(parallel_stacked, np.array([[1, 2, 3], [4, 5, 6]]))
def testAgainstNumpy(self):
# For 1 to 5 dimensions.
for i in range(1, 6):
expected = np.random.random(np.random.permutation(i) + 1)
      # For all possible axes to split on, including negative indices.
for j in range(-i, i):
test_arrays = np_split_squeeze(expected, j)
with self.test_session(use_gpu=True):
actual_pack = array_ops.stack(test_arrays, axis=j)
self.assertEqual(expected.shape, actual_pack.get_shape())
actual_pack = actual_pack.eval()
actual_stack = array_ops.stack(test_arrays, axis=j)
self.assertEqual(expected.shape, actual_stack.get_shape())
actual_stack = actual_stack.eval()
self.assertNDArrayNear(expected, actual_stack, 1e-6)
def testDimOutOfRange(self):
t = [constant_op.constant([1, 2, 3]), constant_op.constant([4, 5, 6])]
with self.assertRaisesRegexp(ValueError, r"axis = 2 not in \[-2, 2\)"):
array_ops.stack(t, axis=2)
def testDimOutOfNegativeRange(self):
t = [constant_op.constant([1, 2, 3]), constant_op.constant([4, 5, 6])]
with self.assertRaisesRegexp(ValueError, r"axis = -3 not in \[-2, 2\)"):
array_ops.stack(t, axis=-3)
class AutomaticPackingTest(test.TestCase):
def testSimple(self):
with self.test_session(use_gpu=True):
self.assertAllEqual(
[1, 0, 2],
ops.convert_to_tensor([1, constant_op.constant(0), 2]).eval())
self.assertAllEqual([[0, 0, 0], [0, 1, 0], [0, 0, 0]],
ops.convert_to_tensor(
[[0, 0, 0], [0, constant_op.constant(1), 0],
[0, 0, 0]]).eval())
self.assertAllEqual([[0, 0, 0], [0, 1, 0], [0, 0, 0]],
ops.convert_to_tensor(
[[0, 0, 0], constant_op.constant([0, 1, 0]),
[0, 0, 0]]).eval())
self.assertAllEqual([[0, 0, 0], [0, 1, 0], [0, 0, 0]],
ops.convert_to_tensor([
constant_op.constant([0, 0, 0]),
constant_op.constant([0, 1, 0]),
constant_op.constant([0, 0, 0])
]).eval())
def testWithNDArray(self):
with self.test_session(use_gpu=True):
result = ops.convert_to_tensor([[[0., 0.],
constant_op.constant([1., 1.])],
np.array(
[[2., 2.], [3., 3.]],
dtype=np.float32)])
self.assertAllEqual([[[0., 0.], [1., 1.]], [[2., 2.], [3., 3.]]],
result.eval())
def testVariable(self):
with self.test_session(use_gpu=True):
v = variables.Variable(17)
result = ops.convert_to_tensor([[0, 0, 0], [0, v, 0], [0, 0, 0]])
v.initializer.run()
self.assertAllEqual([[0, 0, 0], [0, 17, 0], [0, 0, 0]], result.eval())
v.assign(38).op.run()
self.assertAllEqual([[0, 0, 0], [0, 38, 0], [0, 0, 0]], result.eval())
def testDtype(self):
t_0 = ops.convert_to_tensor([[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]])
self.assertEqual(dtypes.float32, t_0.dtype)
t_1 = ops.convert_to_tensor([[0., 0., 0.], constant_op.constant(
[0., 0., 0.], dtype=dtypes.float64), [0., 0., 0.]])
self.assertEqual(dtypes.float64, t_1.dtype)
t_2 = ops.convert_to_tensor(
[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]], dtype=dtypes.float64)
self.assertEqual(dtypes.float64, t_2.dtype)
with self.assertRaises(TypeError):
ops.convert_to_tensor([
constant_op.constant(
[0., 0., 0.], dtype=dtypes.float32), constant_op.constant(
[0., 0., 0.], dtype=dtypes.float64), [0., 0., 0.]
])
with self.assertRaises(TypeError):
ops.convert_to_tensor(
[[0., 0., 0.], constant_op.constant(
[0., 0., 0.], dtype=dtypes.float64), [0., 0., 0.]],
dtype=dtypes.float32)
with self.assertRaises(TypeError):
ops.convert_to_tensor(
[constant_op.constant(
[0., 0., 0.], dtype=dtypes.float64)],
dtype=dtypes.float32)
def testPlaceholder(self):
with self.test_session(use_gpu=True):
# Test using placeholder with a defined shape.
ph_0 = array_ops.placeholder(dtypes.int32, shape=[])
result_0 = ops.convert_to_tensor([[0, 0, 0], [0, ph_0, 0], [0, 0, 0]])
self.assertAllEqual(
[[0, 0, 0], [0, 1, 0], [0, 0, 0]], result_0.eval(feed_dict={ph_0: 1}))
self.assertAllEqual(
[[0, 0, 0], [0, 2, 0], [0, 0, 0]], result_0.eval(feed_dict={ph_0: 2}))
# Test using placeholder with an undefined shape.
ph_1 = array_ops.placeholder(dtypes.int32)
result_1 = ops.convert_to_tensor([[0, 0, 0], [0, ph_1, 0], [0, 0, 0]])
self.assertAllEqual(
[[0, 0, 0], [0, 1, 0], [0, 0, 0]], result_1.eval(feed_dict={ph_1: 1}))
self.assertAllEqual(
[[0, 0, 0], [0, 2, 0], [0, 0, 0]], result_1.eval(feed_dict={ph_1: 2}))
def testShapeErrors(self):
# Static shape error.
ph_0 = array_ops.placeholder(dtypes.int32, shape=[1])
with self.assertRaises(ValueError):
ops.convert_to_tensor([[0, 0, 0], [0, ph_0, 0], [0, 0, 0]])
# Dynamic shape error.
ph_1 = array_ops.placeholder(dtypes.int32)
result_1 = ops.convert_to_tensor([[0, 0, 0], [0, ph_1, 0], [0, 0, 0]])
with self.test_session(use_gpu=True):
with self.assertRaises(errors_impl.InvalidArgumentError):
result_1.eval(feed_dict={ph_1: [1]})
if __name__ == "__main__":
test.main()
| apache-2.0 | 3,642,174,143,858,088,400 | 38.874564 | 80 | 0.569818 | false |
LoHChina/nova | nova/tests/unit/virt/libvirt/volume/test_hgst.py | 38 | 2570 | # Copyright 2015 HGST
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from os_brick.initiator import connector
from nova.tests.unit.virt.libvirt.volume import test_volume
from nova.virt.libvirt.volume import hgst
# Actual testing of the os_brick HGST driver is done in the os_brick testcases.
# Here we're concerned only with the small API shim that connects Nova,
# so these will be pretty simple cases.
class LibvirtHGSTVolumeDriverTestCase(test_volume.LibvirtVolumeBaseTestCase):
def test_libvirt_hgst_driver_type(self):
drvr = hgst.LibvirtHGSTVolumeDriver(self.fake_conn)
self.assertIsInstance(drvr.connector, connector.HGSTConnector)
def test_libvirt_hgst_driver_connect(self):
def brick_conn_vol(data):
return {'path': '/dev/space01'}
drvr = hgst.LibvirtHGSTVolumeDriver(self.fake_conn)
drvr.connector.connect_volume = brick_conn_vol
di = {'path': '/dev/space01', 'name': 'space01'}
ci = {'data': di}
drvr.connect_volume(ci, None)
self.assertEqual('/dev/space01',
ci['data']['device_path'])
def test_libvirt_hgst_driver_get_config(self):
drvr = hgst.LibvirtHGSTVolumeDriver(self.fake_conn)
di = {'path': '/dev/space01', 'name': 'space01', 'type': 'raw',
'dev': 'vda1', 'bus': 'pci0', 'device_path': '/dev/space01'}
ci = {'data': di}
conf = drvr.get_config(ci, di)
self.assertEqual('block', conf.source_type)
self.assertEqual('/dev/space01', conf.source_path)
def test_libvirt_hgst_driver_disconnect(self):
drvr = hgst.LibvirtHGSTVolumeDriver(self.fake_conn)
drvr.connector.disconnect_volume = mock.MagicMock()
di = {'path': '/dev/space01', 'name': 'space01', 'type': 'raw',
'dev': 'vda1', 'bus': 'pci0', 'device_path': '/dev/space01'}
ci = {'data': di}
drvr.disconnect_volume(ci, di)
drvr.connector.disconnect_volume.assert_called_once_with(
di, None)
| apache-2.0 | -3,787,594,705,818,517,000 | 39.793651 | 77 | 0.664981 | false |
codester2/devide.johannes | extra/soappy-cvp/SOAPpy/SOAPBuilder.py | 8 | 22127 | """
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: SOAPBuilder.py,v 1.27 2005/02/21 20:24:13 warnes Exp $'
from version import __version__
import cgi
import copy
from wstools.XMLname import toXMLname, fromXMLname
import fpconst
# SOAPpy modules
from Config import Config
from NS import NS
from Types import *
# Test whether this Python version has Types.BooleanType
# If it doesn't have it, then False and True are serialized as integers
try:
BooleanType
pythonHasBooleanType = 1
except NameError:
pythonHasBooleanType = 0
################################################################################
# SOAP Builder
################################################################################
class SOAPBuilder:
_xml_top = '<?xml version="1.0"?>\n'
_xml_enc_top = '<?xml version="1.0" encoding="%s"?>\n'
_env_top = ( '%(ENV_T)s:Envelope\n' + \
' %(ENV_T)s:encodingStyle="%(ENC)s"\n' ) % \
NS.__dict__
_env_bot = '</%(ENV_T)s:Envelope>\n' % NS.__dict__
# Namespaces potentially defined in the Envelope tag.
_env_ns = {NS.ENC: NS.ENC_T, NS.ENV: NS.ENV_T,
NS.XSD: NS.XSD_T, NS.XSD2: NS.XSD2_T, NS.XSD3: NS.XSD3_T,
NS.XSI: NS.XSI_T, NS.XSI2: NS.XSI2_T, NS.XSI3: NS.XSI3_T}
def __init__(self, args = (), kw = {}, method = None, namespace = None,
header = None, methodattrs = None, envelope = 1, encoding = 'UTF-8',
use_refs = 0, config = Config, noroot = 0):
# Test the encoding, raising an exception if it's not known
if encoding != None:
''.encode(encoding)
self.args = args
self.kw = kw
self.envelope = envelope
self.encoding = encoding
self.method = method
self.namespace = namespace
self.header = header
self.methodattrs= methodattrs
self.use_refs = use_refs
self.config = config
self.out = []
self.tcounter = 0
self.ncounter = 1
self.icounter = 1
self.envns = {}
self.ids = {}
self.depth = 0
self.multirefs = []
self.multis = 0
self.body = not isinstance(args, bodyType)
self.noroot = noroot
def build(self):
if Config.debug: print "In build."
ns_map = {}
# Cache whether typing is on or not
typed = self.config.typed
if self.header:
# Create a header.
self.dump(self.header, "Header", typed = typed)
#self.header = None # Wipe it out so no one is using it.
if self.body:
# Call genns to record that we've used SOAP-ENV.
self.depth += 1
body_ns = self.genns(ns_map, NS.ENV)[0]
self.out.append("<%sBody>\n" % body_ns)
if self.method:
# Save the NS map so that it can be restored when we
# fall out of the scope of the method definition
save_ns_map = ns_map.copy()
self.depth += 1
a = ''
if self.methodattrs:
for (k, v) in self.methodattrs.items():
a += ' %s="%s"' % (k, v)
if self.namespace: # Use the namespace info handed to us
methodns, n = self.genns(ns_map, self.namespace)
else:
methodns, n = '', ''
self.out.append('<%s%s%s%s%s>\n' % (
methodns, self.method, n, a, self.genroot(ns_map)))
try:
if type(self.args) != TupleType:
args = (self.args,)
else:
args = self.args
for i in args:
self.dump(i, typed = typed, ns_map = ns_map)
if hasattr(self.config, "argsOrdering") and self.config.argsOrdering.has_key(self.method):
for k in self.config.argsOrdering.get(self.method):
self.dump(self.kw.get(k), k, typed = typed, ns_map = ns_map)
else:
for (k, v) in self.kw.items():
self.dump(v, k, typed = typed, ns_map = ns_map)
except RecursionError:
if self.use_refs == 0:
# restart
b = SOAPBuilder(args = self.args, kw = self.kw,
method = self.method, namespace = self.namespace,
header = self.header, methodattrs = self.methodattrs,
envelope = self.envelope, encoding = self.encoding,
use_refs = 1, config = self.config)
return b.build()
raise
if self.method:
self.out.append("</%s%s>\n" % (methodns, self.method))
# End of the method definition; drop any local namespaces
ns_map = save_ns_map
self.depth -= 1
if self.body:
# dump may add to self.multirefs, but the for loop will keep
# going until it has used all of self.multirefs, even those
# entries added while in the loop.
self.multis = 1
for obj, tag in self.multirefs:
self.dump(obj, tag, typed = typed, ns_map = ns_map)
self.out.append("</%sBody>\n" % body_ns)
self.depth -= 1
if self.envelope:
e = map (lambda ns: ' xmlns:%s="%s"\n' % (ns[1], ns[0]),
self.envns.items())
self.out = ['<', self._env_top] + e + ['>\n'] + \
self.out + \
[self._env_bot]
if self.encoding != None:
self.out.insert(0, self._xml_enc_top % self.encoding)
return ''.join(self.out).encode(self.encoding)
self.out.insert(0, self._xml_top)
return ''.join(self.out)
def gentag(self):
if Config.debug: print "In gentag."
self.tcounter += 1
return "v%d" % self.tcounter
def genns(self, ns_map, nsURI):
if nsURI == None:
return ('', '')
if type(nsURI) == TupleType: # already a tuple
if len(nsURI) == 2:
ns, nsURI = nsURI
else:
ns, nsURI = None, nsURI[0]
else:
ns = None
if ns_map.has_key(nsURI):
return (ns_map[nsURI] + ':', '')
if self._env_ns.has_key(nsURI):
ns = self.envns[nsURI] = ns_map[nsURI] = self._env_ns[nsURI]
return (ns + ':', '')
if not ns:
ns = "ns%d" % self.ncounter
self.ncounter += 1
ns_map[nsURI] = ns
if self.config.buildWithNamespacePrefix:
return (ns + ':', ' xmlns:%s="%s"' % (ns, nsURI))
else:
return ('', ' xmlns="%s"' % (nsURI))
def genroot(self, ns_map):
if self.noroot:
return ''
if self.depth != 2:
return ''
ns, n = self.genns(ns_map, NS.ENC)
return ' %sroot="%d"%s' % (ns, not self.multis, n)
# checkref checks an element to see if it needs to be encoded as a
# multi-reference element or not. If it returns None, the element has
# been handled and the caller can continue with subsequent elements.
# If it returns a string, the string should be included in the opening
# tag of the marshaled element.
def checkref(self, obj, tag, ns_map):
if self.depth < 2:
return ''
if not self.ids.has_key(id(obj)):
n = self.ids[id(obj)] = self.icounter
self.icounter = n + 1
if self.use_refs == 0:
return ''
if self.depth == 2:
return ' id="i%d"' % n
self.multirefs.append((obj, tag))
else:
if self.use_refs == 0:
raise RecursionError, "Cannot serialize recursive object"
n = self.ids[id(obj)]
if self.multis and self.depth == 2:
return ' id="i%d"' % n
self.out.append('<%s href="#i%d"%s/>\n' %
(tag, n, self.genroot(ns_map)))
return None
# dumpers
def dump(self, obj, tag = None, typed = 1, ns_map = {}):
if Config.debug: print "In dump.", "obj=", obj
ns_map = ns_map.copy()
self.depth += 1
if type(tag) not in (NoneType, StringType, UnicodeType):
raise KeyError, "tag must be a string or None"
try:
meth = getattr(self, "dump_" + type(obj).__name__)
except AttributeError:
if type(obj) == LongType:
obj_type = "integer"
elif pythonHasBooleanType and type(obj) == BooleanType:
obj_type = "boolean"
else:
obj_type = type(obj).__name__
self.out.append(self.dumper(None, obj_type, obj, tag, typed,
ns_map, self.genroot(ns_map)))
else:
meth(obj, tag, typed, ns_map)
self.depth -= 1
# generic dumper
def dumper(self, nsURI, obj_type, obj, tag, typed = 1, ns_map = {},
rootattr = '', id = '',
xml = '<%(tag)s%(type)s%(id)s%(attrs)s%(root)s>%(data)s</%(tag)s>\n'):
if Config.debug: print "In dumper."
if nsURI == None:
nsURI = self.config.typesNamespaceURI
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
a = n = t = ''
if typed and obj_type:
ns, n = self.genns(ns_map, nsURI)
ins = self.genns(ns_map, self.config.schemaNamespaceURI)[0]
t = ' %stype="%s%s"%s' % (ins, ns, obj_type, n)
try: a = obj._marshalAttrs(ns_map, self)
except: pass
try: data = obj._marshalData()
except:
if (obj_type != "string"): # strings are already encoded
data = cgi.escape(str(obj))
else:
data = obj
return xml % {"tag": tag, "type": t, "data": data, "root": rootattr,
"id": id, "attrs": a}
def dump_float(self, obj, tag, typed = 1, ns_map = {}):
if Config.debug: print "In dump_float."
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
if Config.strict_range:
doubleType(obj)
if fpconst.isPosInf(obj):
obj = "INF"
elif fpconst.isNegInf(obj):
obj = "-INF"
elif fpconst.isNaN(obj):
obj = "NaN"
else:
obj = repr(obj)
# Note: python 'float' is actually a SOAP 'double'.
self.out.append(self.dumper(None, "double", obj, tag, typed, ns_map,
self.genroot(ns_map)))
def dump_string(self, obj, tag, typed = 0, ns_map = {}):
if Config.debug: print "In dump_string."
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
id = self.checkref(obj, tag, ns_map)
if id == None:
return
try: data = obj._marshalData()
except: data = obj
self.out.append(self.dumper(None, "string", cgi.escape(data), tag,
typed, ns_map, self.genroot(ns_map), id))
dump_str = dump_string # For Python 2.2+
dump_unicode = dump_string
def dump_None(self, obj, tag, typed = 0, ns_map = {}):
if Config.debug: print "In dump_None."
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
ns = self.genns(ns_map, self.config.schemaNamespaceURI)[0]
self.out.append('<%s %snull="1"%s/>\n' %
(tag, ns, self.genroot(ns_map)))
dump_NoneType = dump_None # For Python 2.2+
def dump_list(self, obj, tag, typed = 1, ns_map = {}):
if Config.debug: print "In dump_list.", "obj=", obj
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
if type(obj) == InstanceType:
data = obj.data
else:
data = obj
if typed:
id = self.checkref(obj, tag, ns_map)
if id == None:
return
try:
sample = data[0]
empty = 0
except:
# preserve type if present
if getattr(obj,"_typed",None) and getattr(obj,"_type",None):
if getattr(obj, "_complexType", None):
sample = typedArrayType(typed=obj._type,
complexType = obj._complexType)
sample._typename = obj._type
if not getattr(obj,"_ns",None): obj._ns = NS.URN
else:
sample = typedArrayType(typed=obj._type)
else:
sample = structType()
empty = 1
# First scan list to see if all are the same type
same_type = 1
if not empty:
for i in data[1:]:
if type(sample) != type(i) or \
(type(sample) == InstanceType and \
sample.__class__ != i.__class__):
same_type = 0
break
ndecl = ''
if same_type:
if (isinstance(sample, structType)) or \
type(sample) == DictType or \
(isinstance(sample, anyType) and \
(getattr(sample, "_complexType", None) and \
sample._complexType)): # force to urn struct
try:
tns = obj._ns or NS.URN
except:
tns = NS.URN
ns, ndecl = self.genns(ns_map, tns)
try:
typename = sample._typename
except:
typename = "SOAPStruct"
t = ns + typename
elif isinstance(sample, anyType):
ns = sample._validNamespaceURI(self.config.typesNamespaceURI,
self.config.strictNamespaces)
if ns:
ns, ndecl = self.genns(ns_map, ns)
t = ns + str(sample._type)
else:
t = 'ur-type'
else:
typename = type(sample).__name__
# For Python 2.2+
if type(sample) == StringType: typename = 'string'
# HACK: unicode is a SOAP string
if type(sample) == UnicodeType: typename = 'string'
# HACK: python 'float' is actually a SOAP 'double'.
if typename=="float": typename="double"
t = self.genns(ns_map, self.config.typesNamespaceURI)[0] + \
typename
else:
t = self.genns(ns_map, self.config.typesNamespaceURI)[0] + \
"ur-type"
try: a = obj._marshalAttrs(ns_map, self)
except: a = ''
ens, edecl = self.genns(ns_map, NS.ENC)
ins, idecl = self.genns(ns_map, self.config.schemaNamespaceURI)
if typed:
self.out.append(
'<%s %sarrayType="%s[%d]" %stype="%sArray"%s%s%s%s%s%s>\n' %
(tag, ens, t, len(data), ins, ens, ndecl, edecl, idecl,
self.genroot(ns_map), id, a))
if typed:
try: elemsname = obj._elemsname
except: elemsname = "item"
else:
elemsname = tag
for i in data:
self.dump(i, elemsname, not same_type, ns_map)
if typed: self.out.append('</%s>\n' % tag)
dump_tuple = dump_list
def dump_dictionary(self, obj, tag, typed = 1, ns_map = {}):
if Config.debug: print "In dump_dictionary."
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
id = self.checkref(obj, tag, ns_map)
if id == None:
return
try: a = obj._marshalAttrs(ns_map, self)
except: a = ''
self.out.append('<%s%s%s%s>\n' %
(tag, id, a, self.genroot(ns_map)))
for (k, v) in obj.items():
if k[0] != "_":
self.dump(v, k, 1, ns_map)
self.out.append('</%s>\n' % tag)
dump_dict = dump_dictionary # For Python 2.2+
def dump_instance(self, obj, tag, typed = 1, ns_map = {}):
if Config.debug: print "In dump_instance.", "obj=", obj, "tag=", tag
if not tag:
# If it has a name use it.
if isinstance(obj, anyType) and obj._name:
tag = obj._name
else:
tag = self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
if isinstance(obj, arrayType): # Array
self.dump_list(obj, tag, typed, ns_map)
return
if isinstance(obj, faultType): # Fault
cns, cdecl = self.genns(ns_map, NS.ENC)
vns, vdecl = self.genns(ns_map, NS.ENV)
self.out.append('''<%sFault %sroot="1"%s%s>
<faultcode>%s</faultcode>
<faultstring>%s</faultstring>
''' % (vns, cns, vdecl, cdecl, obj.faultcode, obj.faultstring))
if hasattr(obj, "detail"):
self.dump(obj.detail, "detail", typed, ns_map)
self.out.append("</%sFault>\n" % vns)
return
r = self.genroot(ns_map)
try: a = obj._marshalAttrs(ns_map, self)
except: a = ''
if isinstance(obj, voidType): # void
self.out.append("<%s%s%s></%s>\n" % (tag, a, r, tag))
return
id = self.checkref(obj, tag, ns_map)
if id == None:
return
if isinstance(obj, structType):
# Check for namespace
ndecl = ''
ns = obj._validNamespaceURI(self.config.typesNamespaceURI,
self.config.strictNamespaces)
if ns:
ns, ndecl = self.genns(ns_map, ns)
tag = ns + tag
self.out.append("<%s%s%s%s%s>\n" % (tag, ndecl, id, a, r))
keylist = obj.__dict__.keys()
# first write out items with order information
if hasattr(obj, '_keyord'):
for i in range(len(obj._keyord)):
self.dump(obj._aslist(i), obj._keyord[i], 1, ns_map)
keylist.remove(obj._keyord[i])
# now write out the rest
for k in keylist:
if (k[0] != "_"):
self.dump(getattr(obj,k), k, 1, ns_map)
if isinstance(obj, bodyType):
self.multis = 1
for v, k in self.multirefs:
self.dump(v, k, typed = typed, ns_map = ns_map)
self.out.append('</%s>\n' % tag)
elif isinstance(obj, anyType):
t = ''
if typed:
ns = obj._validNamespaceURI(self.config.typesNamespaceURI,
self.config.strictNamespaces)
if ns:
ons, ondecl = self.genns(ns_map, ns)
ins, indecl = self.genns(ns_map,
self.config.schemaNamespaceURI)
t = ' %stype="%s%s"%s%s' % \
(ins, ons, obj._type, ondecl, indecl)
self.out.append('<%s%s%s%s%s>%s</%s>\n' %
(tag, t, id, a, r, obj._marshalData(), tag))
else: # Some Class
self.out.append('<%s%s%s>\n' % (tag, id, r))
for (k, v) in obj.__dict__.items():
if k[0] != "_":
self.dump(v, k, 1, ns_map)
self.out.append('</%s>\n' % tag)
################################################################################
# SOAPBuilder's more public interface
################################################################################
def buildSOAP(args=(), kw={}, method=None, namespace=None,
header=None, methodattrs=None, envelope=1, encoding='UTF-8',
config=Config, noroot = 0):
t = SOAPBuilder(args=args, kw=kw, method=method, namespace=namespace,
header=header, methodattrs=methodattrs,envelope=envelope,
encoding=encoding, config=config,noroot=noroot)
return t.build()
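# Illustrative usage only (not part of the original SOAPpy source): a minimal
# sketch of calling the buildSOAP() helper above.  The method name, namespace
# URN and keyword argument below are invented placeholders.
if __name__ == '__main__':
    print buildSOAP(kw={'symbol': 'IBM'}, method='getQuote',
                    namespace='urn:example-quotes')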
| bsd-3-clause | 5,413,288,180,619,646,000 | 33.900631 | 102 | 0.501243 | false |
lmazuel/ansible | lib/ansible/modules/source_control/git_config.py | 70 | 7530 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Marius Gedminas <[email protected]>
# (c) 2016, Matthew Gamble <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: git_config
author:
- "Matthew Gamble"
- "Marius Gedminas"
version_added: 2.1
requirements: ['git']
short_description: Read and write git configuration
description:
- The C(git_config) module changes git configuration by invoking 'git config'.
This is needed if you don't want to use M(template) for the entire git
config file (e.g. because you need to change just C(user.email) in
    /etc/.git/config). Solutions involving M(command) are cumbersome or
don't work correctly in check mode.
options:
list_all:
description:
- List all settings (optionally limited to a given I(scope))
required: false
choices: [ "yes", "no" ]
default: no
name:
description:
- The name of the setting. If no value is supplied, the value will
be read from the config if it has been set.
required: false
default: null
repo:
description:
- Path to a git repository for reading and writing values from a
specific repo.
required: false
default: null
scope:
description:
- Specify which scope to read/set values from. This is required
when setting config values. If this is set to local, you must
also specify the repo parameter. It defaults to system only when
not using I(list_all)=yes.
required: false
choices: [ "local", "global", "system" ]
default: null
value:
description:
- When specifying the name of a single setting, supply a value to
set that setting to the given value.
required: false
default: null
'''
EXAMPLES = '''
# Set some settings in ~/.gitconfig
- git_config:
name: alias.ci
scope: global
value: commit
- git_config:
name: alias.st
scope: global
value: status
# Or system-wide:
- git_config:
name: alias.remotev
scope: system
value: remote -v
- git_config:
name: core.editor
scope: global
value: vim
# scope=system is the default
- git_config:
name: alias.diffc
value: diff --cached
- git_config:
name: color.ui
value: auto
# Make etckeeper not complain when invoked by cron
- git_config:
name: user.email
repo: /etc
scope: local
value: 'root@{{ ansible_fqdn }}'
# Read individual values from git config
- git_config:
name: alias.ci
scope: global
# scope: system is also assumed when reading values, unless list_all=yes
- git_config:
name: alias.diffc
# Read all values from git config
- git_config:
list_all: yes
scope: global
# When list_all=yes and no scope is specified, you get configuration from all scopes
- git_config:
list_all: yes
# Specify a repository to include local settings
- git_config:
list_all: yes
repo: /path/to/repo.git
'''
RETURN = '''
---
config_value:
description: When list_all=no and value is not set, a string containing the value of the setting in name
returned: success
type: string
sample: "vim"
config_values:
description: When list_all=yes, a dict containing key/value pairs of multiple configuration settings
returned: success
type: dictionary
sample:
core.editor: "vim"
color.ui: "auto"
alias.diffc: "diff --cached"
alias.remotev: "remote -v"
'''
def main():
module = AnsibleModule(
argument_spec=dict(
list_all=dict(required=False, type='bool', default=False),
name=dict(type='str'),
repo=dict(type='path'),
scope=dict(required=False, type='str', choices=['local', 'global', 'system']),
value=dict(required=False)
),
mutually_exclusive=[['list_all', 'name'], ['list_all', 'value']],
required_if=[('scope', 'local', ['repo'])],
required_one_of=[['list_all', 'name']],
supports_check_mode=True,
)
git_path = module.get_bin_path('git', True)
params = module.params
# We check error message for a pattern, so we need to make sure the messages appear in the form we're expecting.
# Set the locale to C to ensure consistent messages.
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
if params['name']:
name = params['name']
else:
name = None
if params['scope']:
scope = params['scope']
elif params['list_all']:
scope = None
else:
scope = 'system'
if params['value']:
new_value = params['value']
else:
new_value = None
args = [git_path, "config", "--includes"]
if params['list_all']:
args.append('-l')
if scope:
args.append("--" + scope)
if name:
args.append(name)
if scope == 'local':
dir = params['repo']
elif params['list_all'] and params['repo']:
# Include local settings from a specific repo when listing all available settings
dir = params['repo']
else:
# Run from root directory to avoid accidentally picking up any local config settings
dir = "/"
(rc, out, err) = module.run_command(' '.join(args), cwd=dir)
if params['list_all'] and scope and rc == 128 and 'unable to read config file' in err:
# This just means nothing has been set at the given scope
module.exit_json(changed=False, msg='', config_values={})
elif rc >= 2:
# If the return code is 1, it just means the option hasn't been set yet, which is fine.
module.fail_json(rc=rc, msg=err, cmd=' '.join(args))
if params['list_all']:
values = out.rstrip().splitlines()
config_values = {}
for value in values:
k, v = value.split('=', 1)
config_values[k] = v
module.exit_json(changed=False, msg='', config_values=config_values)
elif not new_value:
module.exit_json(changed=False, msg='', config_value=out.rstrip())
else:
old_value = out.rstrip()
if old_value == new_value:
module.exit_json(changed=False, msg="")
if not module.check_mode:
new_value_quoted = "'" + new_value + "'"
(rc, out, err) = module.run_command(' '.join(args + [new_value_quoted]), cwd=dir)
if err:
module.fail_json(rc=rc, msg=err, cmd=' '.join(args + [new_value_quoted]))
module.exit_json(
msg='setting changed',
diff=dict(
before_header=' '.join(args),
before=old_value + "\n",
after_header=' '.join(args),
after=new_value + "\n"
),
changed=True
)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 | -7,158,663,321,818,635,000 | 28.299611 | 116 | 0.625631 | false |
alon/servo | components/script/dom/bindings/codegen/parser/tests/test_lenientSetter.py | 137 | 1867 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
def should_throw(parser, harness, message, code):
parser = parser.reset();
threw = False
try:
parser.parse(code)
parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown: %s" % message)
def WebIDLTest(parser, harness):
# The [LenientSetter] extended attribute MUST take no arguments.
should_throw(parser, harness, "no arguments", """
interface I {
[LenientSetter=X] readonly attribute long A;
};
""")
# An attribute with the [LenientSetter] extended attribute MUST NOT
# also be declared with the [PutForwards] extended attribute.
should_throw(parser, harness, "PutForwards", """
interface I {
[PutForwards=B, LenientSetter] readonly attribute J A;
};
interface J {
attribute long B;
};
""")
# An attribute with the [LenientSetter] extended attribute MUST NOT
# also be declared with the [Replaceable] extended attribute.
should_throw(parser, harness, "Replaceable", """
interface I {
[Replaceable, LenientSetter] readonly attribute J A;
};
""")
# The [LenientSetter] extended attribute MUST NOT be used on an
# attribute that is not read only.
should_throw(parser, harness, "writable attribute", """
interface I {
[LenientSetter] attribute long A;
};
""")
# The [LenientSetter] extended attribute MUST NOT be used on a
# static attribute.
should_throw(parser, harness, "static attribute", """
interface I {
[LenientSetter] static readonly attribute long A;
};
""")
| mpl-2.0 | 5,773,587,906,975,646,000 | 31.189655 | 71 | 0.626138 | false |
aperigault/ansible | lib/ansible/modules/cloud/softlayer/sl_vm.py | 34 | 11435 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: sl_vm
short_description: create or cancel a virtual instance in SoftLayer
description:
- Creates or cancels SoftLayer instances.
- When created, optionally waits for it to be 'running'.
version_added: "2.1"
options:
instance_id:
description:
      - Instance Id of the virtual instance to perform the action on.
hostname:
description:
- Hostname to be provided to a virtual instance.
domain:
description:
- Domain name to be provided to a virtual instance.
datacenter:
description:
- Datacenter for the virtual instance to be deployed.
tags:
description:
- Tag or list of tags to be provided to a virtual instance.
hourly:
description:
- Flag to determine if the instance should be hourly billed.
type: bool
default: 'yes'
private:
description:
- Flag to determine if the instance should be private only.
type: bool
default: 'no'
dedicated:
description:
- Flag to determine if the instance should be deployed in dedicated space.
type: bool
default: 'no'
local_disk:
description:
- Flag to determine if local disk should be used for the new instance.
type: bool
default: 'yes'
cpus:
description:
- Count of cpus to be assigned to new virtual instance.
required: true
memory:
description:
- Amount of memory to be assigned to new virtual instance.
required: true
disks:
description:
- List of disk sizes to be assigned to new virtual instance.
required: true
default: [ 25 ]
os_code:
description:
- OS Code to be used for new virtual instance.
image_id:
description:
- Image Template to be used for new virtual instance.
nic_speed:
description:
- NIC Speed to be assigned to new virtual instance.
default: 10
public_vlan:
description:
- VLAN by its Id to be assigned to the public NIC.
private_vlan:
description:
- VLAN by its Id to be assigned to the private NIC.
ssh_keys:
description:
- List of ssh keys by their Id to be assigned to a virtual instance.
post_uri:
description:
- URL of a post provisioning script to be loaded and executed on virtual instance.
state:
description:
- Create, or cancel a virtual instance.
- Specify C(present) for create, C(absent) to cancel.
choices: [ absent, present ]
default: present
wait:
description:
- Flag used to wait for active status before returning.
type: bool
default: 'yes'
wait_time:
description:
- Time in seconds before wait returns.
default: 600
requirements:
- python >= 2.6
- softlayer >= 4.1.1
author:
- Matt Colton (@mcltn)
'''
EXAMPLES = '''
- name: Build instance
hosts: localhost
gather_facts: no
tasks:
- name: Build instance request
sl_vm:
hostname: instance-1
domain: anydomain.com
datacenter: dal09
tags: ansible-module-test
hourly: yes
private: no
dedicated: no
local_disk: yes
cpus: 1
memory: 1024
disks: [25]
os_code: UBUNTU_LATEST
wait: no
- name: Build additional instances
hosts: localhost
gather_facts: no
tasks:
- name: Build instances request
sl_vm:
hostname: "{{ item.hostname }}"
domain: "{{ item.domain }}"
datacenter: "{{ item.datacenter }}"
tags: "{{ item.tags }}"
hourly: "{{ item.hourly }}"
private: "{{ item.private }}"
dedicated: "{{ item.dedicated }}"
local_disk: "{{ item.local_disk }}"
cpus: "{{ item.cpus }}"
memory: "{{ item.memory }}"
disks: "{{ item.disks }}"
os_code: "{{ item.os_code }}"
ssh_keys: "{{ item.ssh_keys }}"
wait: "{{ item.wait }}"
with_items:
- hostname: instance-2
domain: anydomain.com
datacenter: dal09
tags:
- ansible-module-test
- ansible-module-test-slaves
hourly: yes
private: no
dedicated: no
local_disk: yes
cpus: 1
memory: 1024
disks:
- 25
- 100
os_code: UBUNTU_LATEST
ssh_keys: []
wait: True
- hostname: instance-3
domain: anydomain.com
datacenter: dal09
tags:
- ansible-module-test
- ansible-module-test-slaves
hourly: yes
private: no
dedicated: no
local_disk: yes
cpus: 1
memory: 1024
disks:
- 25
- 100
os_code: UBUNTU_LATEST
ssh_keys: []
wait: yes
- name: Cancel instances
hosts: localhost
gather_facts: no
tasks:
- name: Cancel by tag
sl_vm:
state: absent
tags: ansible-module-test
'''
# TODO: Disabled RETURN as it is breaking the build for docs. Needs to be fixed.
RETURN = '''# '''
import json
import time
try:
import SoftLayer
from SoftLayer import VSManager
HAS_SL = True
vsManager = VSManager(SoftLayer.create_client_from_env())
except ImportError:
HAS_SL = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import string_types
# TODO: get this info from API
STATES = ['present', 'absent']
DATACENTERS = ['ams01', 'ams03', 'che01', 'dal01', 'dal05', 'dal06', 'dal09', 'dal10', 'dal12', 'dal13', 'fra02',
'fra04', 'fra05', 'hkg02', 'hou02', 'lon02', 'lon04', 'lon06', 'mel01', 'mex01', 'mil01', 'mon01',
'osl01', 'par01', 'sao01', 'sea01', 'seo01', 'sjc01', 'sjc03', 'sjc04', 'sng01', 'syd01', 'syd04',
'tok02', 'tor01', 'wdc01', 'wdc04', 'wdc06', 'wdc07']
CPU_SIZES = [1, 2, 4, 8, 16, 32, 56]
MEMORY_SIZES = [1024, 2048, 4096, 6144, 8192, 12288, 16384, 32768, 49152, 65536, 131072, 247808]
INITIALDISK_SIZES = [25, 100]
LOCALDISK_SIZES = [25, 100, 150, 200, 300]
SANDISK_SIZES = [10, 20, 25, 30, 40, 50, 75, 100, 125, 150, 175, 200, 250, 300, 350, 400, 500, 750, 1000, 1500, 2000]
NIC_SPEEDS = [10, 100, 1000]
def create_virtual_instance(module):
instances = vsManager.list_instances(
hostname=module.params.get('hostname'),
domain=module.params.get('domain'),
datacenter=module.params.get('datacenter')
)
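    # Idempotency check: if an instance with the same hostname, domain and
    # datacenter already exists, report no change instead of creating another.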
if instances:
return False, None
# Check if OS or Image Template is provided (Can't be both, defaults to OS)
if (module.params.get('os_code') is not None and module.params.get('os_code') != ''):
module.params['image_id'] = ''
elif (module.params.get('image_id') is not None and module.params.get('image_id') != ''):
module.params['os_code'] = ''
module.params['disks'] = [] # Blank out disks since it will use the template
else:
return False, None
tags = module.params.get('tags')
if isinstance(tags, list):
tags = ','.join(map(str, module.params.get('tags')))
instance = vsManager.create_instance(
hostname=module.params.get('hostname'),
domain=module.params.get('domain'),
cpus=module.params.get('cpus'),
memory=module.params.get('memory'),
hourly=module.params.get('hourly'),
datacenter=module.params.get('datacenter'),
os_code=module.params.get('os_code'),
image_id=module.params.get('image_id'),
local_disk=module.params.get('local_disk'),
disks=module.params.get('disks'),
ssh_keys=module.params.get('ssh_keys'),
nic_speed=module.params.get('nic_speed'),
private=module.params.get('private'),
public_vlan=module.params.get('public_vlan'),
private_vlan=module.params.get('private_vlan'),
dedicated=module.params.get('dedicated'),
post_uri=module.params.get('post_uri'),
tags=tags,
)
if instance is not None and instance['id'] > 0:
return True, instance
else:
return False, None
def wait_for_instance(module, id):
instance = None
completed = False
wait_timeout = time.time() + module.params.get('wait_time')
while not completed and wait_timeout > time.time():
try:
completed = vsManager.wait_for_ready(id, 10, 2)
if completed:
instance = vsManager.get_instance(id)
except Exception:
completed = False
return completed, instance
def cancel_instance(module):
canceled = True
if module.params.get('instance_id') is None and (module.params.get('tags') or module.params.get('hostname') or module.params.get('domain')):
tags = module.params.get('tags')
if isinstance(tags, string_types):
tags = [module.params.get('tags')]
instances = vsManager.list_instances(tags=tags, hostname=module.params.get('hostname'), domain=module.params.get('domain'))
for instance in instances:
try:
vsManager.cancel_instance(instance['id'])
except Exception:
canceled = False
elif module.params.get('instance_id') and module.params.get('instance_id') != 0:
try:
            vsManager.cancel_instance(module.params.get('instance_id'))
except Exception:
canceled = False
else:
return False, None
return canceled, None
def main():
module = AnsibleModule(
argument_spec=dict(
instance_id=dict(type='str'),
hostname=dict(type='str'),
domain=dict(type='str'),
datacenter=dict(type='str', choices=DATACENTERS),
tags=dict(type='str'),
hourly=dict(type='bool', default=True),
private=dict(type='bool', default=False),
dedicated=dict(type='bool', default=False),
local_disk=dict(type='bool', default=True),
cpus=dict(type='int', choices=CPU_SIZES),
memory=dict(type='int', choices=MEMORY_SIZES),
disks=dict(type='list', default=[25]),
os_code=dict(type='str'),
image_id=dict(type='str'),
nic_speed=dict(type='int', choices=NIC_SPEEDS),
public_vlan=dict(type='str'),
private_vlan=dict(type='str'),
ssh_keys=dict(type='list', default=[]),
post_uri=dict(type='str'),
state=dict(type='str', default='present', choices=STATES),
wait=dict(type='bool', default=True),
wait_time=dict(type='int', default=600),
)
)
if not HAS_SL:
module.fail_json(msg='softlayer python library required for this module')
if module.params.get('state') == 'absent':
(changed, instance) = cancel_instance(module)
elif module.params.get('state') == 'present':
(changed, instance) = create_virtual_instance(module)
if module.params.get('wait') is True and instance:
(changed, instance) = wait_for_instance(module, instance['id'])
module.exit_json(changed=changed, instance=json.loads(json.dumps(instance, default=lambda o: o.__dict__)))
if __name__ == '__main__':
main()
| gpl-3.0 | -2,064,034,638,261,271,300 | 29.905405 | 144 | 0.604373 | false |
Serag8/Bachelor | google_appengine/google/appengine/ext/remote_api/remote_api_stub.py | 4 | 31174 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""An apiproxy stub that calls a remote handler via HTTP.
This allows easy remote access to the App Engine datastore, and potentially any
of the other App Engine APIs, using the same interface you use when accessing
the service locally.
An example Python script:
---
from google.appengine.ext import db
from google.appengine.ext.remote_api import remote_api_stub
from myapp import models
import getpass
def auth_func():
return (raw_input('Username:'), getpass.getpass('Password:'))
remote_api_stub.ConfigureRemoteApi(None, '/_ah/remote_api', auth_func,
'my-app.appspot.com')
# Now you can access the remote datastore just as if your code was running on
# App Engine!
houses = models.House.all().fetch(100)
for a_house in q:
a_house.doors += 1
db.put(houses)
---
A few caveats:
- Where possible, avoid iterating over queries. Fetching as many results as you
will need is faster and more efficient. If you don't know how many results
you need, or you need 'all of them', iterating is fine.
- Likewise, it's a good idea to put entities in batches. Instead of calling put
for each individual entity, accumulate them and put them in batches using
db.put(), if you can.
- Requests and responses are still limited to 1MB each, so if you have large
  entities or try to fetch or put many of them at once, your requests may fail.
"""
import google
import os
import pickle
import random
import sys
import thread
import threading
import yaml
import hashlib
if os.environ.get('APPENGINE_RUNTIME') == 'python27':
from google.appengine.api import apiproxy_rpc
from google.appengine.api import apiproxy_stub_map
from google.appengine.datastore import datastore_pb
from google.appengine.ext.remote_api import remote_api_pb
from google.appengine.ext.remote_api import remote_api_services
from google.appengine.runtime import apiproxy_errors
else:
from google.appengine.api import apiproxy_rpc
from google.appengine.api import apiproxy_stub_map
from google.appengine.datastore import datastore_pb
from google.appengine.ext.remote_api import remote_api_pb
from google.appengine.ext.remote_api import remote_api_services
from google.appengine.runtime import apiproxy_errors
from google.appengine.tools import appengine_rpc
_REQUEST_ID_HEADER = 'HTTP_X_APPENGINE_REQUEST_ID'
class Error(Exception):
"""Base class for exceptions in this module."""
class ConfigurationError(Error):
"""Exception for configuration errors."""
class UnknownJavaServerError(Error):
"""Exception for exceptions returned from a Java remote_api handler."""
def GetUserAgent():
"""Determines the value of the 'User-agent' header to use for HTTP requests.
Returns:
String containing the 'user-agent' header value, which includes the SDK
version, the platform information, and the version of Python;
e.g., "remote_api/1.0.1 Darwin/9.2.0 Python/2.5.2".
"""
product_tokens = []
product_tokens.append("Google-remote_api/1.0")
product_tokens.append(appengine_rpc.GetPlatformToken())
python_version = ".".join(str(i) for i in sys.version_info)
product_tokens.append("Python/%s" % python_version)
return " ".join(product_tokens)
def GetSourceName():
return "Google-remote_api-1.0"
def HashEntity(entity):
"""Return a very-likely-unique hash of an entity."""
return hashlib.sha1(entity.Encode()).digest()
class TransactionData(object):
"""Encapsulates data about an individual transaction."""
def __init__(self, thread_id, is_xg):
self.thread_id = thread_id
self.preconditions = {}
self.entities = {}
self.is_xg = is_xg
class RemoteStub(object):
"""A stub for calling services on a remote server over HTTP.
You can use this to stub out any service that the remote server supports.
"""
_local = threading.local()
def __init__(self, server, path, _test_stub_map=None):
"""Constructs a new RemoteStub that communicates with the specified server.
Args:
server: An instance of a subclass of
google.appengine.tools.appengine_rpc.AbstractRpcServer.
path: The path to the handler this stub should send requests to.
"""
self._server = server
self._path = path
self._test_stub_map = _test_stub_map
def _PreHookHandler(self, service, call, request, response):
pass
def _PostHookHandler(self, service, call, request, response):
pass
def MakeSyncCall(self, service, call, request, response):
self._PreHookHandler(service, call, request, response)
try:
test_stub = self._test_stub_map and self._test_stub_map.GetStub(service)
if test_stub:
test_stub.MakeSyncCall(service, call, request, response)
else:
self._MakeRealSyncCall(service, call, request, response)
finally:
self._PostHookHandler(service, call, request, response)
@classmethod
def _GetRequestId(cls):
"""Returns the id of the request associated with the current thread."""
return cls._local.request_id
@classmethod
def _SetRequestId(cls, request_id):
"""Set the id of the request associated with the current thread."""
cls._local.request_id = request_id
def _MakeRealSyncCall(self, service, call, request, response):
request_pb = remote_api_pb.Request()
request_pb.set_service_name(service)
request_pb.set_method(call)
request_pb.set_request(request.Encode())
if hasattr(self._local, 'request_id'):
request_pb.set_request_id(self._local.request_id)
response_pb = remote_api_pb.Response()
encoded_request = request_pb.Encode()
encoded_response = self._server.Send(self._path, encoded_request)
response_pb.ParseFromString(encoded_response)
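    # The handler reports failures in one of three ways: a structured
    # application error, a pickled Python exception, or a flag marking an
    # exception raised in a Java remote_api handler; otherwise the response
    # carries the encoded reply for the underlying API call.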
if response_pb.has_application_error():
error_pb = response_pb.application_error()
raise apiproxy_errors.ApplicationError(error_pb.code(),
error_pb.detail())
elif response_pb.has_exception():
raise pickle.loads(response_pb.exception())
elif response_pb.has_java_exception():
raise UnknownJavaServerError("An unknown error has occured in the "
"Java remote_api handler for this call.")
else:
response.ParseFromString(response_pb.response())
def CreateRPC(self):
return apiproxy_rpc.RPC(stub=self)
class RemoteDatastoreStub(RemoteStub):
"""A specialised stub for accessing the App Engine datastore remotely.
A specialised stub is required because there are some datastore operations
that preserve state between calls. This stub makes queries possible.
  Transactions are emulated locally: writes are buffered in the stub and
  submitted to the server in a single request at commit time.
"""
def __init__(self, server, path, default_result_count=20,
_test_stub_map=None):
"""Constructor.
Args:
server: The server name to connect to.
path: The URI path on the server.
default_result_count: The number of items to fetch, by default, in a
datastore Query or Next operation. This affects the batch size of
query iterators.
"""
super(RemoteDatastoreStub, self).__init__(server, path, _test_stub_map)
self.default_result_count = default_result_count
self.__queries = {}
self.__transactions = {}
self.__next_local_cursor = 1
self.__local_cursor_lock = threading.Lock()
self.__next_local_tx = 1
self.__local_tx_lock = threading.Lock()
def MakeSyncCall(self, service, call, request, response):
assert service == 'datastore_v3'
explanation = []
assert request.IsInitialized(explanation), explanation
handler = getattr(self, '_Dynamic_' + call, None)
if handler:
handler(request, response)
else:
super(RemoteDatastoreStub, self).MakeSyncCall(service, call, request,
response)
assert response.IsInitialized(explanation), explanation
def _Dynamic_RunQuery(self, query, query_result, cursor_id = None):
if query.has_transaction():
txdata = self.__transactions[query.transaction().handle()]
tx_result = remote_api_pb.TransactionQueryResult()
super(RemoteDatastoreStub, self).MakeSyncCall(
'remote_datastore', 'TransactionQuery', query, tx_result)
query_result.CopyFrom(tx_result.result())
eg_key = tx_result.entity_group_key()
encoded_eg_key = eg_key.Encode()
eg_hash = None
if tx_result.has_entity_group():
eg_hash = HashEntity(tx_result.entity_group())
old_key, old_hash = txdata.preconditions.get(encoded_eg_key, (None, None))
if old_key is None:
txdata.preconditions[encoded_eg_key] = (eg_key, eg_hash)
elif old_hash != eg_hash:
raise apiproxy_errors.ApplicationError(
datastore_pb.Error.CONCURRENT_TRANSACTION,
'Transaction precondition failed.')
else:
super(RemoteDatastoreStub, self).MakeSyncCall(
'datastore_v3', 'RunQuery', query, query_result)
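    # The remote handler keeps no query state between calls, so cursors are
    # emulated locally: the query (with its offset/limit advanced past the
    # results already returned) is remembered under a locally generated
    # cursor id and replayed by _Dynamic_Next.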
if cursor_id is None:
self.__local_cursor_lock.acquire()
try:
cursor_id = self.__next_local_cursor
self.__next_local_cursor += 1
finally:
self.__local_cursor_lock.release()
if query_result.more_results():
query.set_offset(query.offset() + query_result.result_size())
if query.has_limit():
query.set_limit(query.limit() - query_result.result_size())
self.__queries[cursor_id] = query
else:
self.__queries[cursor_id] = None
query_result.mutable_cursor().set_cursor(cursor_id)
def _Dynamic_Next(self, next_request, query_result):
assert next_request.offset() == 0
cursor_id = next_request.cursor().cursor()
if cursor_id not in self.__queries:
raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
'Cursor %d not found' % cursor_id)
query = self.__queries[cursor_id]
if query is None:
query_result.set_more_results(False)
return
else:
if next_request.has_count():
query.set_count(next_request.count())
else:
query.clear_count()
self._Dynamic_RunQuery(query, query_result, cursor_id)
query_result.set_skipped_results(0)
def _Dynamic_Get(self, get_request, get_response):
txid = None
if get_request.has_transaction():
txid = get_request.transaction().handle()
txdata = self.__transactions[txid]
assert (txdata.thread_id ==
thread.get_ident()), "Transactions are single-threaded."
keys = [(k, k.Encode()) for k in get_request.key_list()]
new_request = datastore_pb.GetRequest()
for key, enckey in keys:
if enckey not in txdata.entities:
new_request.add_key().CopyFrom(key)
else:
new_request = get_request
if new_request.key_size() > 0:
super(RemoteDatastoreStub, self).MakeSyncCall(
'datastore_v3', 'Get', new_request, get_response)
if txid is not None:
newkeys = new_request.key_list()
entities = get_response.entity_list()
for key, entity in zip(newkeys, entities):
entity_hash = None
if entity.has_entity():
entity_hash = HashEntity(entity.entity())
txdata.preconditions[key.Encode()] = (key, entity_hash)
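      # Rebuild the response in the originally requested key order, serving
      # entities already written in this transaction from the local buffer
      # and everything else from the results just fetched.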
new_response = datastore_pb.GetResponse()
it = iter(get_response.entity_list())
for key, enckey in keys:
if enckey in txdata.entities:
cached_entity = txdata.entities[enckey][1]
if cached_entity:
new_response.add_entity().mutable_entity().CopyFrom(cached_entity)
else:
new_response.add_entity()
else:
new_entity = it.next()
if new_entity.has_entity():
assert new_entity.entity().key() == key
new_response.add_entity().CopyFrom(new_entity)
else:
new_response.add_entity()
get_response.CopyFrom(new_response)
def _Dynamic_Put(self, put_request, put_response):
if put_request.has_transaction():
entities = put_request.entity_list()
requires_id = lambda x: x.id() == 0 and not x.has_name()
new_ents = [e for e in entities
if requires_id(e.key().path().element_list()[-1])]
id_request = datastore_pb.PutRequest()
txid = put_request.transaction().handle()
txdata = self.__transactions[txid]
assert (txdata.thread_id ==
thread.get_ident()), "Transactions are single-threaded."
if new_ents:
for ent in new_ents:
e = id_request.add_entity()
e.mutable_key().CopyFrom(ent.key())
e.mutable_entity_group()
id_response = datastore_pb.PutResponse()
if txdata.is_xg:
rpc_name = 'GetIDsXG'
else:
rpc_name = 'GetIDs'
super(RemoteDatastoreStub, self).MakeSyncCall(
'remote_datastore', rpc_name, id_request, id_response)
assert id_request.entity_size() == id_response.key_size()
for key, ent in zip(id_response.key_list(), new_ents):
ent.mutable_key().CopyFrom(key)
ent.mutable_entity_group().add_element().CopyFrom(
key.path().element(0))
for entity in entities:
txdata.entities[entity.key().Encode()] = (entity.key(), entity)
put_response.add_key().CopyFrom(entity.key())
else:
super(RemoteDatastoreStub, self).MakeSyncCall(
'datastore_v3', 'Put', put_request, put_response)
def _Dynamic_Delete(self, delete_request, response):
if delete_request.has_transaction():
txid = delete_request.transaction().handle()
txdata = self.__transactions[txid]
assert (txdata.thread_id ==
thread.get_ident()), "Transactions are single-threaded."
for key in delete_request.key_list():
txdata.entities[key.Encode()] = (key, None)
else:
super(RemoteDatastoreStub, self).MakeSyncCall(
'datastore_v3', 'Delete', delete_request, response)
def _Dynamic_BeginTransaction(self, request, transaction):
self.__local_tx_lock.acquire()
try:
txid = self.__next_local_tx
self.__transactions[txid] = TransactionData(thread.get_ident(),
request.allow_multiple_eg())
self.__next_local_tx += 1
finally:
self.__local_tx_lock.release()
transaction.set_handle(txid)
transaction.set_app(request.app())
def _Dynamic_Commit(self, transaction, transaction_response):
txid = transaction.handle()
if txid not in self.__transactions:
raise apiproxy_errors.ApplicationError(
datastore_pb.Error.BAD_REQUEST,
'Transaction %d not found.' % (txid,))
txdata = self.__transactions[txid]
assert (txdata.thread_id ==
thread.get_ident()), "Transactions are single-threaded."
del self.__transactions[txid]
tx = remote_api_pb.TransactionRequest()
tx.set_allow_multiple_eg(txdata.is_xg)
for key, hash in txdata.preconditions.values():
precond = tx.add_precondition()
precond.mutable_key().CopyFrom(key)
if hash:
precond.set_hash(hash)
puts = tx.mutable_puts()
deletes = tx.mutable_deletes()
for key, entity in txdata.entities.values():
if entity:
puts.add_entity().CopyFrom(entity)
else:
deletes.add_key().CopyFrom(key)
super(RemoteDatastoreStub, self).MakeSyncCall(
'remote_datastore', 'Transaction',
tx, datastore_pb.PutResponse())
def _Dynamic_Rollback(self, transaction, transaction_response):
txid = transaction.handle()
self.__local_tx_lock.acquire()
try:
if txid not in self.__transactions:
raise apiproxy_errors.ApplicationError(
datastore_pb.Error.BAD_REQUEST,
'Transaction %d not found.' % (txid,))
txdata = self.__transactions[txid]
assert (txdata.thread_id ==
thread.get_ident()), "Transactions are single-threaded."
del self.__transactions[txid]
finally:
self.__local_tx_lock.release()
def _Dynamic_CreateIndex(self, index, id_response):
raise apiproxy_errors.CapabilityDisabledError(
'The remote datastore does not support index manipulation.')
def _Dynamic_UpdateIndex(self, index, void):
raise apiproxy_errors.CapabilityDisabledError(
'The remote datastore does not support index manipulation.')
def _Dynamic_DeleteIndex(self, index, void):
raise apiproxy_errors.CapabilityDisabledError(
'The remote datastore does not support index manipulation.')
ALL_SERVICES = set(remote_api_services.SERVICE_PB_MAP)
def GetRemoteAppIdFromServer(server, path, remote_token=None):
"""Return the app id from a connection to an existing server.
Args:
server: An appengine_rpc.AbstractRpcServer
path: The path to the remote_api handler for your app
(for example, '/_ah/remote_api').
remote_token: Token to validate that the response was to this request.
Returns:
App ID as reported by the remote server.
Raises:
ConfigurationError: The server returned an invalid response.
"""
if not remote_token:
random.seed()
remote_token = str(random.random())[2:]
remote_token = str(remote_token)
urlargs = {'rtok': remote_token}
response = server.Send(path, payload=None, **urlargs)
if not response.startswith('{'):
raise ConfigurationError(
'Invalid response received from server: %s' % response)
app_info = yaml.load(response)
if not app_info or 'rtok' not in app_info or 'app_id' not in app_info:
raise ConfigurationError('Error parsing app_id lookup response')
if str(app_info['rtok']) != remote_token:
raise ConfigurationError('Token validation failed during app_id lookup. '
'(sent %s, got %s)' % (repr(remote_token),
repr(app_info['rtok'])))
return app_info['app_id']
def ConfigureRemoteApiFromServer(server, path, app_id, services=None,
default_auth_domain=None,
use_remote_datastore=True):
"""Does necessary setup to allow easy remote access to App Engine APIs.
Args:
server: An AbstractRpcServer
path: The path to the remote_api handler for your app
(for example, '/_ah/remote_api').
app_id: The app_id of your app, as declared in app.yaml.
services: A list of services to set up stubs for. If specified, only those
services are configured; by default all supported services are configured.
default_auth_domain: The authentication domain to use by default.
use_remote_datastore: Whether to use RemoteDatastoreStub instead of passing
through datastore requests. RemoteDatastoreStub batches transactional
      datastore requests since, in production, datastore transactions are scoped to
a single request.
Raises:
urllib2.HTTPError: if app_id is not provided and there is an error while
retrieving it.
    ConfigurationError: if there is an error configuring the Remote API.
"""
if services is None:
services = set(ALL_SERVICES)
else:
services = set(services)
unsupported = services.difference(ALL_SERVICES)
if unsupported:
raise ConfigurationError('Unsupported service(s): %s'
% (', '.join(unsupported),))
os.environ['APPLICATION_ID'] = app_id
os.environ.setdefault('AUTH_DOMAIN', default_auth_domain or 'gmail.com')
apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
if 'datastore_v3' in services and use_remote_datastore:
services.remove('datastore_v3')
datastore_stub = RemoteDatastoreStub(server, path)
apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore_stub)
stub = RemoteStub(server, path)
for service in services:
apiproxy_stub_map.apiproxy.RegisterStub(service, stub)
def GetRemoteAppId(servername,
path,
auth_func,
rpc_server_factory=appengine_rpc.HttpRpcServer,
rtok=None,
secure=False,
save_cookies=False):
"""Get the remote appid as reported at servername/path.
This will also return an AbstractRpcServer server, which can be used with
ConfigureRemoteApiFromServer.
Args:
servername: The hostname your app is deployed on.
path: The path to the remote_api handler for your app
(for example, '/_ah/remote_api').
auth_func: A function that takes no arguments and returns a
(username, password) tuple. This will be called if your application
requires authentication to access the remote_api handler (it should!)
and you do not already have a valid auth cookie.
rpc_server_factory: A factory to construct the rpc server for the datastore.
    rtok: The validation token to send with app_id lookups. If None, a random
token is used.
secure: Use SSL when communicating with the server.
save_cookies: Forwarded to rpc_server_factory function.
Returns:
(app_id, server): The application ID and an AbstractRpcServer.
"""
server = rpc_server_factory(servername, auth_func, GetUserAgent(),
GetSourceName(), save_cookies=save_cookies,
debug_data=False, secure=secure)
app_id = GetRemoteAppIdFromServer(server, path, rtok)
return app_id, server
_OAUTH_SCOPES = [
'https://www.googleapis.com/auth/appengine.apis',
'https://www.googleapis.com/auth/userinfo.email',
]
def _ConfigureRemoteApiWithKeyFile(servername,
path,
service_account,
key_file_path):
"""Does necessary setup to allow easy remote access to App Engine APIs.
This function uses OAuth2 with a credential derived from service_account and
key_file_path to communicate with App Engine APIs.
Use of this method requires an encryption library to be installed.
Args:
servername: The hostname your app is deployed on (typically,
<app_id>.appspot.com).
path: The path to the remote_api handler for your app
(for example, '/_ah/remote_api').
service_account: The email address of the service account to use for
making OAuth requests.
key_file_path: The path to a .p12 file containing the private key for
service_account.
Returns:
server, a server which may be useful for calling the application directly.
Raises:
urllib2.HTTPError: if app_id is not provided and there is an error while
retrieving it.
    ConfigurationError: if there is an error configuring the DatastoreFileStub.
ImportError: if the oauth2client module is not available or an appropriate
      encryption library cannot be found.
IOError: if key_file_path does not exist or cannot be read.
"""
try:
import oauth2client.client
except ImportError, e:
raise ImportError('Use of a key file to access the Remote API '
'requires the oauth2client module: %s' % e)
if not oauth2client.client.HAS_CRYPTO:
raise ImportError('Use of a key file to access the Remote API '
'requires an encryption library. Please install '
'either PyOpenSSL or PyCrypto 2.6 or later.')
with open(key_file_path, 'rb') as key_file:
key = key_file.read()
credentials = oauth2client.client.SignedJwtAssertionCredentials(
service_account,
key,
_OAUTH_SCOPES)
return _ConfigureRemoteApiWithOAuthCredentials(servername,
path,
credentials)
def _ConfigureRemoteApiWithComputeEngineCredential(servername,
path):
"""Does necessary setup to allow easy remote access to App Engine APIs.
This function uses OAuth2 with a credential from the Compute Engine metadata
server to communicate with App Engine APIs.
Args:
servername: The hostname your app is deployed on (typically,
<app_id>.appspot.com).
path: The path to the remote_api handler for your app
(for example, '/_ah/remote_api').
Returns:
server, a server which may be useful for calling the application directly.
Raises:
urllib2.HTTPError: if app_id is not provided and there is an error while
retrieving it.
    ConfigurationError: if there is an error configuring the DatastoreFileStub.
ImportError: if the oauth2client or httplib2 module is not available.
"""
try:
import httplib2
import oauth2client
except ImportError, e:
raise ImportError('Use of Compute Engine credentials requires the '
'oauth2client and httplib2 modules: %s' % e)
credentials = oauth2client.gce.AppAssertionCredentials(_OAUTH_SCOPES)
http = httplib2.Http()
credentials.authorize(http)
credentials.refresh(http)
return _ConfigureRemoteApiWithOAuthCredentials(servername,
path,
credentials)
def _ConfigureRemoteApiWithOAuthCredentials(servername,
path,
credentials):
"""Does necessary setup to allow easy remote access to App Engine APIs.
Args:
servername: The hostname your app is deployed on (typically,
<app_id>.appspot.com).
path: The path to the remote_api handler for your app
(for example, '/_ah/remote_api').
credentials: An oauth2client.OAuth2Credentials object.
Returns:
server, a server which may be useful for calling the application directly.
Raises:
urllib2.HTTPError: if app_id is not provided and there is an error while
retrieving it.
    ConfigurationError: if there is an error configuring the DatastoreFileStub.
ImportError: if the appengine_rpc_httplib2 module is not available.
"""
try:
from google.appengine.tools import appengine_rpc_httplib2
except ImportError, e:
raise ImportError('Use of OAuth credentials requires the '
'appengine_rpc_httplib2 module. %s' % e)
if not servername:
raise ConfigurationError('servername required')
oauth2_parameters = (
appengine_rpc_httplib2.HttpRpcServerOAuth2.OAuth2Parameters(
access_token=None,
client_id=None,
client_secret=None,
scope=None,
refresh_token=None,
credential_file=None,
credentials=credentials))
return ConfigureRemoteApi(
app_id=None,
path=path,
auth_func=oauth2_parameters,
servername=servername,
rpc_server_factory=appengine_rpc_httplib2.HttpRpcServerOAuth2)
def ConfigureRemoteApi(app_id,
path,
auth_func,
servername=None,
rpc_server_factory=appengine_rpc.HttpRpcServer,
rtok=None,
secure=False,
services=None,
default_auth_domain=None,
save_cookies=False,
use_remote_datastore=True):
"""Does necessary setup to allow easy remote access to App Engine APIs.
Either servername must be provided or app_id must not be None. If app_id
is None and a servername is provided, this function will send a request
to the server to retrieve the app_id.
Note that if the app_id is specified, the internal appid must be used;
this may include a partition and a domain. It is often easier to let
remote_api_stub retrieve the app_id automatically.
Args:
app_id: The app_id of your app, as declared in app.yaml, or None.
path: The path to the remote_api handler for your app
(for example, '/_ah/remote_api').
auth_func: A function that takes no arguments and returns a
(username, password) tuple. This will be called if your application
requires authentication to access the remote_api handler (it should!)
and you do not already have a valid auth cookie.
servername: The hostname your app is deployed on. Defaults to
<app_id>.appspot.com.
rpc_server_factory: A factory to construct the rpc server for the datastore.
    rtok: The validation token to send with app_id lookups. If None, a random
token is used.
secure: Use SSL when communicating with the server.
services: A list of services to set up stubs for. If specified, only those
services are configured; by default all supported services are configured.
default_auth_domain: The authentication domain to use by default.
save_cookies: Forwarded to rpc_server_factory function.
use_remote_datastore: Whether to use RemoteDatastoreStub instead of passing
through datastore requests. RemoteDatastoreStub batches transactional
      datastore requests since, in production, datastore transactions are scoped to
a single request.
Returns:
server, the server created by rpc_server_factory, which may be useful for
calling the application directly.
Raises:
urllib2.HTTPError: if app_id is not provided and there is an error while
retrieving it.
    ConfigurationError: if there is an error configuring the DatastoreFileStub.
"""
if not servername and not app_id:
raise ConfigurationError('app_id or servername required')
if not servername:
servername = '%s.appspot.com' % (app_id,)
server = rpc_server_factory(servername, auth_func, GetUserAgent(),
GetSourceName(), save_cookies=save_cookies,
debug_data=False, secure=secure)
if not app_id:
app_id = GetRemoteAppIdFromServer(server, path, rtok)
ConfigureRemoteApiFromServer(server, path, app_id, services,
default_auth_domain, use_remote_datastore)
return server
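# Illustrative usage sketch (not part of the original module): how a caller
# might set up the Remote API. The hostname and credentials below are
# hypothetical placeholders.
#
#   def auth_func():
#     return ('user@example.com', 'password')
#
#   ConfigureRemoteApi(None, '/_ah/remote_api', auth_func,
#                      servername='myapp.appspot.com')
#   # After this call, datastore and other registered service RPCs issued in
#   # this process are sent to the remote application.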
def MaybeInvokeAuthentication():
"""Sends an empty request through to the configured end-point.
If authentication is necessary, this will cause the rpc_server to invoke
interactive authentication.
"""
datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
if isinstance(datastore_stub, RemoteStub):
datastore_stub._server.Send(datastore_stub._path, payload=None)
else:
raise ConfigurationError('remote_api is not configured.')
ConfigureRemoteDatastore = ConfigureRemoteApi
| mit | -2,244,815,479,147,907,600 | 33.831285 | 80 | 0.665747 | false |
chrismeyersfsu/webapp-bills | form.app.engine.py | 1 | 19113 | import cgi
import urllib, urllib2, Cookie
import cookielib
import re
import logging
from urlparse import urlparse
from django.utils import simplejson
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import urlfetch
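# Minimal shared error handler -- an assumed implementation, since the request
# handlers in this module call handleError() on urllib2 errors but the original
# file does not define it. Logging the exception keeps those call sites working.
def handleError(e):
    logging.exception("HTTP request failed: %s", e)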
################################################################################
class URLOpener:
def __init__(self):
self.cookie = Cookie.SimpleCookie()
self.jSessionId = ""
def open(self, url, data = None):
if data is None:
method = urlfetch.GET
else:
method = urlfetch.POST
while url is not None:
try:
o = urlparse(url)
path = o.path
str = "Getting url ["+url+"] cookie ["+self._makeCookieHeader(path)+"]"
if data != None:
str += " data ["+data+"]"
logging.debug(str)
response = urlfetch.fetch(url=url,
payload=data,
method=method,
headers=self._getHeaders(path),
allow_truncated=False,
follow_redirects=False,
deadline=10
)
data = None # Next request will be a get, so no need to send the data again.
method = urlfetch.GET
cookieStr = response.headers.get('Set-cookie', '')
if self.jSessionId == "":
if cookieStr.find("JSESSIONID") != -1:
pattern = re.compile('JSESSIONID=(.*?);')
match = pattern.search(cookieStr)
if match != None:
self.jSessionId = match.group(1)
logging.debug("Received cookies: ["+cookieStr + "]\n")
self.cookie.load(response.headers.get('Set-cookie', '')) # Load the cookies from the response
# Change cookie to the gathered JSESSIONID
url = response.headers.get('location')
            except urllib2.URLError, e:
                # URLOpener has no webapp response object, so just log the
                # failure and delegate to the module-level error handler.
                logging.error("Generic error fetching url [%s]", url)
                handleError(e)
#except DownloadError:
# logging.error("Download error")
#except:
# logging.error("Other error")
return response
def _getHeaders(self, path):
headers = {
# 'Content-Type': 'text/html',
'User-agent': "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12",
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Keep-Alive': '300',
'Connection': 'keep-alive',
'Accept-Language': 'en-us,en;q=0.5',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Cookie' : self._makeCookieHeader(path)
}
return headers
def _makeCookieHeader(self, path):
cookieHeader = ""
logStr = ""
for k,v in self.cookie.items():
# if v.key == "JSESSIONID":
# if self.jSessionId != "":
# logging.debug("\n==== Replaced jsession ====\n")
# v.value = self.jSessionId
if 'path' in v:
if path.find(v['path'], 0, len(v['path'])) != -1:
logging.debug("\n==== "+v['path']+" ====\n")
cookieHeader += "%s=%s; " % (v.key, v.value)
elif v["path"] == "/,":
logging.debug("\n==== "+v['path']+" ====\n")
cookieHeader += "%s=%s; " % (v.key, v.value)
else:
logging.debug("\n==== Not Including "+v['path']+" ====\n")
else:
cookieHeader += "%s=%s; " % (v.key, v.value)
# return self.cookie.output("")
return cookieHeader
################################################################################
class BooksParser:
result = ""
results = ""
posList = ""
posEndList = ""
data = ""
count = 0
regex = {
'requirement': "<tr class=\"book course-(?P<requirement>[\w ]+)\">",
'image': "<td class=\"book-cover\"><a href=\"(?P<image>.*?)\"",
'title': '<span class=\"book-title\">(?P<title>.*?)</span>',
'author': '<span class=\"book-meta book-author\">(?P<author>.*?)</span>',
'isbn': '<span class=\"isbn\">(?P<isbn>\d+)</span>',
'copyright': '<span class=\"book-meta book-copyright\">(?P<copyright>.*?)</span>',
'publisher': '<span class=\"book-meta book-publisher\">(?P<publisher>.*?)</span>',
'edition': '<span class=\"book-meta book-edition\">(?P<edition>.*?)</span>',
'binding': '<span class=\"book-meta book-binding\">(?P<binding>.*?)</span>',
'priceNew': "<input type=\"hidden\" name=\"product-new-price-\d+\" id=\"product-new-price-\d+\" value=\"(?P<priceNew>\d+)\" />",
'priceUsed': "<input type=\"hidden\" name=\"product-used-price-\d+\" id=\"product-used-price-\d+\" value=\"(?P<priceUsed>\d+)\" />",
'priceNewRent': "<input type=\"hidden\" name=\"product-new-rental-price-\d+\" id=\"product-new-rental-price-\d+\" value=\"(?P<priceNewRent>\d+)\" />",
'priceUsedRent': "<input type=\"hidden\" name=\"product-used-rental-price-\d+\" id=\"product-used-rental-price-\d+\" value=\"(?P<priceUsedRent>\d+)\" />",
'availNew': "<td class=\"price\"><label for=\"radio-sku-new_\d+\">(?P<availNew>.*?)</label>",
'availUsed': "<td class=\"price\"><label for=\"radio-sku-used_\d+\">(?P<availUsed>.*?)</label>",
'availNewRent': "<td class=\"price\"><label for=\"radio-radio-sku-new-rental_\d+\">(?P<availNewRent>.*?)</label>",
'availUsedRent': "<td class=\"price\"><label for=\"radio-radio-sku-used-rental_\d+\">(?P<availUsedRent>.*?)</label>"
}
regexKeys = [ 'requirement', 'image', 'title', 'author', 'isbn', 'copyright', 'publisher', 'edition', 'binding', 'priceNew', 'priceUsed', 'priceNewRent', 'priceUsedRent', 'priceUsed', 'availNew', 'availUsed', 'availNewRent', 'availUsedRent'];
def __init__(self):
self.results = list()
self.posList = list()
self.posEndList = list()
self.data = ""
self.count = 0
self.result = {
'requirement': "",
'image': "",
'priceNew': "",
'priceUsed': "",
'priceNewRent': "",
'priceUsedRent': "",
'availNew': "",
'availUsed': "",
'availNewRent': "",
'availUsedRent': "",
'isbn': ""
}
def setData(self, data):
self.data = data;
def setup(self):
# Cut down the size of data
# Remove the recommended products
# TO-DO: support Recommended Products
endOffset = self.data.find("Recommended Products")
if endOffset != -1:
self.data = self.data[0:endOffset]
count = 0
    # For each item (book), not every regex string may be found.
    # We take care to associate meta-data with the correct book.
    # Assume that the first regex is always found; search the whole page data
    # for all occurrences of the first regex and note the offsets.
    # Then, only search between those offsets.
k = self.regexKeys[0]
matchIter = re.finditer(self.regex[k], self.data)
startPrev = 0
endPrev = 0
flagFirst = True
for match in matchIter:
start = match.start(0)
end = match.end(0)
if flagFirst == True:
flagFirst = False
else:
self.posList.append(startPrev)
self.posEndList.append(start)
startPrev = start
endPrev = end
count += 1
    # Add the final entry (only if at least one book entry was found)
    if count > 0:
      self.posList.append(start)
      self.posEndList.append(len(self.data))
self.posList.reverse()
self.posEndList.reverse()
def next(self):
if len(self.posList) == 0:
return False
pos = self.posList.pop()
posEnd = self.posEndList.pop()
for k in self.regexKeys:
pattern = re.compile(self.regex[k])
match = pattern.search(self.data, pos, posEnd)
if match == None:
self.result[k] = "NOT_FOUND" # fill result with nothing
else:
self.result[k] = match.group(1)
# self.results.append(self.result)
self.results.append(dict(self.result))
# self.results.append("%d %s" % (len(self.posList), "World"))
return True
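# Illustrative usage of BooksParser (mirrors what the Books handler below does);
# `data` stands for the HTML of a textbook listing page fetched from the store.
#
#   parser = BooksParser()
#   parser.setData(data)
#   parser.setup()
#   while parser.next():
#     pass
#   print simplejson.dumps(parser.results)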
class CampusTerm(webapp.RequestHandler):
url = "http://www.billsbookstore.com/"
regexSelect = '<select name=\"selTerm\" id=\"fTerm\" class=\"box\" title=\"Select a campus term\">(?P<select>.*?)</select>'
regexOption = '<option value=\"(\d+)\|(\d+)\">(.*?)</option>'
def get(self):
urlFull = self.url
try:
result = urllib2.urlopen(urlFull)
data = result.read()
pattern = re.compile(self.regexSelect)
match = pattern.search(data)
datatmp = match.group(1)
pattern = re.compile(self.regexOption)
pos = 0;
results = list()
while True:
match = pattern.search(datatmp, pos)
if match == None:
break;
results.append({ 'campustermId' : match.group(1)+'|'+match.group(2), 'campusId' : match.group(1), 'termId' : match.group(2), 'campusName' : match.group(3) })
pos = match.end(0)
self.response.out.write(simplejson.dumps(results))
except urllib2.URLError, e:
handleError(e)
class Department(webapp.RequestHandler):
url = "http://www.billsbookstore.com/textbooks_xml.asp?control=campus"
regexDept = '<department id=\"(\d+)\" abrev=\"(.*?)\" name=\"(.*?)\" />'
def get(self):
dept = self.request.get('campus')
term = self.request.get('term')
urlFull = self.url+'&campus='+dept+'&term='+term
try:
result = urllib2.urlopen(urlFull)
data = result.read()
pattern = re.compile(self.regexDept)
pos = 0;
results = list()
while True:
match = pattern.search(data, pos)
if match == None:
break;
results.append({ 'deptId' : match.group(1), 'deptAbrev': match.group(2), 'deptName' : match.group(3) })
pos = match.end(0)
self.response.out.write(simplejson.dumps(results))
except urllib2.URLError, e:
handleError(e)
class Course(webapp.RequestHandler):
url = "http://www.billsbookstore.com/textbooks_xml.asp?control=department"
regexCourse = '<course id=\"(\d+)\" name=\"(\d+)\s+\" />'
def get(self):
dept = self.request.get('dept')
term = self.request.get('term')
urlFull = self.url+'&dept='+dept+'&term='+term
try:
result = urllib2.urlopen(urlFull)
data = result.read()
pattern = re.compile(self.regexCourse)
pos = 0;
results = list()
while True:
match = pattern.search(data, pos)
if match == None:
break;
results.append({ 'courseId' : match.group(1), 'courseNumber' : match.group(2) })
pos = match.end(0)
self.response.out.write(simplejson.dumps(results))
except urllib2.URLError, e:
handleError(e)
class Section(webapp.RequestHandler):
url = "http://www.billsbookstore.com/textbooks_xml.asp?control=course"
regexSection = '<section id=\"(\d+)\" name=\"(.*?)\" instructor=\"(.*?)\" />'
def get(self):
dept = self.request.get('course')
term = self.request.get('term')
urlFull = self.url+'&course='+dept+'&term='+term
try:
result = urllib2.urlopen(urlFull)
data = result.read()
pattern = re.compile(self.regexSection)
pos = 0;
results = list()
while True:
match = pattern.search(data, pos)
if match == None:
break;
results.append({ 'sectionId' : match.group(1), 'sectionName' : match.group(2), 'instructor' : match.group(3) })
pos = match.end(0)
self.response.out.write(simplejson.dumps(results))
except urllib2.URLError, e:
handleError(e)
class Books(webapp.RequestHandler):
url = "http://www.billsbookstore.com/textbooks_xml.asp?control=section§ion="
def get(self):
section = self.request.get('id');
# section = "53867" # Single book
# section = "53857" # Many books, and a PRS clicker
# section = "55512" # Multiple books, single section
urlFull = self.url+section
try:
sp = BooksParser()
result = urllib2.urlopen(urlFull)
data = result.read()
sp.setData(data)
sp.setup()
while sp.next():
True
# for k in sp.regexKeys:
# self.response.out.write(k + "=" + sp.result[k] + "<br>\n")
self.response.out.write(simplejson.dumps(sp.results))
except urllib2.URLError, e:
handleError(e)
class BlackBoard(webapp.RequestHandler):
urlLogin = 'https://bb5.fsu.edu/cas/'
urlSession = 'https://bb5.fsu.edu/cas/login?loginurl=https%3A%2F%2Fapps.oti.fsu.edu%2FSecureLogin%2FLogin&service=https%3A%2F%2Fapps.oti.fsu.edu%2FSecureLogin%2FAuthenticator%3Fnoheader%3Dtrue%26nofooter%3Dtrue'
urlSecureApps = 'https://apps.oti.fsu.edu/SecureLogin/servlet/PortalHandler'
urlSchedule = 'https://apps.oti.fsu.edu/StudentClassSchedule/Schedule'
def get(self):
username = self.request.get('username');
password = self.request.get('password');
urlhandler = URLOpener()
urlhandler.cookie.clear()
# Login
try:
post_data = urllib.urlencode({
'username':username,
'password': password,
'service': 'https://campus.fsu.edu/webapps/login/',
'loginurl': 'https://campus.fsu.edu/webapps/login/bb_bb60/logincas.jsp',
'x': 0,
'y': 0
})
result = urlhandler.open(self.urlSession, post_data)
data = result.content
logging.debug(data)
pattern = re.compile('window.location.href="(.*?)"')
match = pattern.search(data)
if match != None:
urlRedirect = match.group(1)
# Complete login process
# by following window.location.href
result = urlhandler.open(urlRedirect)
data = result.content
# logging.debug(data)
except urllib2.URLError, e:
self.response.out.write("Error")
handleError(e)
# Session
# try:
# result = urlhandler.open(self.urlSession)
# data = result.content
# self.response.out.write(data)
# except urllib2.URLError, e:
# self.response.out.write("Error")
# handleError(e)
# Secure apps
try:
# Setup the session
result = urlhandler.open(self.urlSecureApps)
data = result.content
# logging.debug(data)
# Submit accept
post_data = urllib.urlencode({'submit' : 'Accept'})
result = urlhandler.open(self.urlSecureApps, post_data)
data = result.content
# logging.debug(data)
except urllib2.URLError, e:
self.response.out.write("Error")
handleError(e)
# Get schedule
try:
# Grab the list of "secure apps" links
result = urlhandler.open(self.urlSchedule)
data = result.content
# logging.debug(data)
# pattern = re.compile('<a target="" href=".*?SESSIONID=(.*?)\&.*?">My Class Schedule</a>')
# match = pattern.search(data)
# sessionId = ""
# if match != None:
# sessionId = match.group(1)
# urlhandler.cookie.load("JSESSIONID="+sessionId)
# logging.debug("\n\n Using session ["+sessionId+"]\n\n")
# else:
# logging.debug("\n\n\n\n=============================Session not found\n\n\n\n")
# pattern = re.compile('<a target="" href="(.*?)">My Class Schedule</a>')
# match = pattern.search(data)
# if match != None:
# urlRedirect = match.group(1)
# result = urlhandler.open('https://apps.oti.fsu.edu/'+urlRedirect)
# data = result.content
# logging.debug(data)
post_data = urllib.urlencode({
'CALLINGURI' : '/jsp/schedule_index.jsp',
'refer' : 'null',
'YEAR' : 2011,
'TERM' : 9,
'genSched' : 'Generate A Schedule'
})
result = urlhandler.open(self.urlSchedule, post_data)
data = result.content
logging.debug(data)
# self.response.out.write(urlhandler._makeCookieHeader())
# self.response.out.write(data)
# result = urlhandler.open('https://campus.fsu.edu/webapps/login?action=logout')
matchIter = re.finditer('<td class="only" align=".*?">(.*?)</td>', data)
count=0
matchKeys = [ 'alert', 'num', 'course', 'section', 'session', 'sectionId', 'title', 'hours', 'building', 'room', 'days', 'begin', 'end' ]
countMax = len(matchKeys)
schedule = []
klass = {}
# TODO: alert element will have some extra html in it (<span></span>)
for match in matchIter:
klass[matchKeys[count]] = match.group(1)
count += 1
if count == countMax:
schedule.append(klass)
count = 0
klass = {}
self.response.out.write(simplejson.dumps(schedule))
except urllib2.URLError, e:
self.response.out.write("Error")
handleError(e)
application = webapp.WSGIApplication([('/campusterm', CampusTerm),
('/dept', Department),
('/course', Course),
('/section', Section),
('/books', Books),
('/bb', BlackBoard)],
debug=True)
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
| gpl-3.0 | 1,125,095,166,283,836,500 | 38.165984 | 246 | 0.513316 | false |
barykaed/Pelican-Test | fsp_env/Lib/site-packages/pip/_vendor/distlib/index.py | 571 | 20976 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import hashlib
import logging
import os
import shutil
import subprocess
import tempfile
try:
from threading import Thread
except ImportError:
from dummy_threading import Thread
from . import DistlibException
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
urlparse, build_opener, string_types)
from .util import cached_property, zip_dir, ServerProxy
logger = logging.getLogger(__name__)
DEFAULT_INDEX = 'https://pypi.python.org/pypi'
DEFAULT_REALM = 'pypi'
class PackageIndex(object):
"""
This class represents a package index compatible with PyPI, the Python
Package Index.
"""
boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
def __init__(self, url=None):
"""
Initialise an instance.
:param url: The URL of the index. If not specified, the URL for PyPI is
used.
"""
self.url = url or DEFAULT_INDEX
self.read_configuration()
scheme, netloc, path, params, query, frag = urlparse(self.url)
if params or query or frag or scheme not in ('http', 'https'):
raise DistlibException('invalid repository: %s' % self.url)
self.password_handler = None
self.ssl_verifier = None
self.gpg = None
self.gpg_home = None
self.rpc_proxy = None
with open(os.devnull, 'w') as sink:
for s in ('gpg2', 'gpg'):
try:
rc = subprocess.check_call([s, '--version'], stdout=sink,
stderr=sink)
if rc == 0:
self.gpg = s
break
except OSError:
pass
def _get_pypirc_command(self):
"""
Get the distutils command for interacting with PyPI configurations.
:return: the command.
"""
from distutils.core import Distribution
from distutils.config import PyPIRCCommand
d = Distribution()
return PyPIRCCommand(d)
def read_configuration(self):
"""
        Read the PyPI access configuration as supported by distutils, getting
        distutils to do the actual work. This populates ``username``, ``password``,
``realm`` and ``url`` attributes from the configuration.
"""
# get distutils to do the work
c = self._get_pypirc_command()
c.repository = self.url
cfg = c._read_pypirc()
self.username = cfg.get('username')
self.password = cfg.get('password')
self.realm = cfg.get('realm', 'pypi')
self.url = cfg.get('repository', self.url)
def save_configuration(self):
"""
Save the PyPI access configuration. You must have set ``username`` and
``password`` attributes before calling this method.
Again, distutils is used to do the actual work.
"""
self.check_credentials()
# get distutils to do the work
c = self._get_pypirc_command()
c._store_pypirc(self.username, self.password)
def check_credentials(self):
"""
Check that ``username`` and ``password`` have been set, and raise an
exception if not.
"""
if self.username is None or self.password is None:
raise DistlibException('username and password must be set')
pm = HTTPPasswordMgr()
_, netloc, _, _, _, _ = urlparse(self.url)
pm.add_password(self.realm, netloc, self.username, self.password)
self.password_handler = HTTPBasicAuthHandler(pm)
def register(self, metadata):
"""
Register a distribution on PyPI, using the provided metadata.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the distribution to be
registered.
:return: The HTTP response received from PyPI upon submission of the
request.
"""
self.check_credentials()
metadata.validate()
d = metadata.todict()
d[':action'] = 'verify'
request = self.encode_request(d.items(), [])
response = self.send_request(request)
d[':action'] = 'submit'
request = self.encode_request(d.items(), [])
return self.send_request(request)
def _reader(self, name, stream, outbuf):
"""
Thread runner for reading lines of from a subprocess into a buffer.
:param name: The logical name of the stream (used for logging only).
:param stream: The stream to read from. This will typically a pipe
connected to the output stream of a subprocess.
:param outbuf: The list to append the read lines to.
"""
while True:
s = stream.readline()
if not s:
break
s = s.decode('utf-8').rstrip()
outbuf.append(s)
logger.debug('%s: %s' % (name, s))
stream.close()
def get_sign_command(self, filename, signer, sign_password,
keystore=None):
"""
Return a suitable command for signing a file.
:param filename: The pathname to the file to be signed.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: The signing command as a list suitable to be
passed to :class:`subprocess.Popen`.
"""
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
if keystore is None:
keystore = self.gpg_home
if keystore:
cmd.extend(['--homedir', keystore])
if sign_password is not None:
cmd.extend(['--batch', '--passphrase-fd', '0'])
td = tempfile.mkdtemp()
sf = os.path.join(td, os.path.basename(filename) + '.asc')
cmd.extend(['--detach-sign', '--armor', '--local-user',
signer, '--output', sf, filename])
logger.debug('invoking: %s', ' '.join(cmd))
return cmd, sf
def run_command(self, cmd, input_data=None):
"""
        Run a command in a child process, passing it any input data specified.
:param cmd: The command to run.
:param input_data: If specified, this must be a byte string containing
data to be sent to the child process.
:return: A tuple consisting of the subprocess' exit code, a list of
lines read from the subprocess' ``stdout``, and a list of
lines read from the subprocess' ``stderr``.
"""
kwargs = {
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE,
}
if input_data is not None:
kwargs['stdin'] = subprocess.PIPE
stdout = []
stderr = []
p = subprocess.Popen(cmd, **kwargs)
# We don't use communicate() here because we may need to
# get clever with interacting with the command
t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
t1.start()
t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
t2.start()
if input_data is not None:
p.stdin.write(input_data)
p.stdin.close()
p.wait()
t1.join()
t2.join()
return p.returncode, stdout, stderr
def sign_file(self, filename, signer, sign_password, keystore=None):
"""
Sign a file.
:param filename: The pathname to the file to be signed.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the instance's
``gpg_home`` attribute is used instead.
:return: The absolute pathname of the file where the signature is
stored.
"""
cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
keystore)
rc, stdout, stderr = self.run_command(cmd,
sign_password.encode('utf-8'))
if rc != 0:
raise DistlibException('sign command failed with error '
'code %s' % rc)
return sig_file
def upload_file(self, metadata, filename, signer=None, sign_password=None,
filetype='sdist', pyversion='source', keystore=None):
"""
Upload a release file to the index.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the file to be uploaded.
:param filename: The pathname of the file to be uploaded.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param filetype: The type of the file being uploaded. This is the
distutils command which produced that file, e.g.
``sdist`` or ``bdist_wheel``.
:param pyversion: The version of Python which the release relates
to. For code compatible with any Python, this would
be ``source``, otherwise it would be e.g. ``3.2``.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the instance's
``gpg_home`` attribute is used instead.
:return: The HTTP response received from PyPI upon submission of the
request.
"""
self.check_credentials()
if not os.path.exists(filename):
raise DistlibException('not found: %s' % filename)
metadata.validate()
d = metadata.todict()
sig_file = None
if signer:
if not self.gpg:
logger.warning('no signing program available - not signed')
else:
sig_file = self.sign_file(filename, signer, sign_password,
keystore)
with open(filename, 'rb') as f:
file_data = f.read()
md5_digest = hashlib.md5(file_data).hexdigest()
sha256_digest = hashlib.sha256(file_data).hexdigest()
d.update({
':action': 'file_upload',
'protcol_version': '1',
'filetype': filetype,
'pyversion': pyversion,
'md5_digest': md5_digest,
'sha256_digest': sha256_digest,
})
files = [('content', os.path.basename(filename), file_data)]
if sig_file:
with open(sig_file, 'rb') as f:
sig_data = f.read()
files.append(('gpg_signature', os.path.basename(sig_file),
sig_data))
shutil.rmtree(os.path.dirname(sig_file))
request = self.encode_request(d.items(), files)
return self.send_request(request)
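    # Illustrative usage sketch (not part of distlib itself): `md` is assumed
    # to be a Metadata instance describing the release, and ~/.pypirc is
    # assumed to hold credentials for the configured index.
    #
    #   index = PackageIndex()
    #   index.username = 'me'          # or rely on the values from ~/.pypirc
    #   index.password = 'secret'
    #   response = index.upload_file(md, 'dist/project-1.0.tar.gz',
    #                                filetype='sdist', pyversion='source')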
def upload_documentation(self, metadata, doc_dir):
"""
Upload documentation to the index.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the documentation to be
uploaded.
:param doc_dir: The pathname of the directory which contains the
documentation. This should be the directory that
contains the ``index.html`` for the documentation.
:return: The HTTP response received from PyPI upon submission of the
request.
"""
self.check_credentials()
if not os.path.isdir(doc_dir):
raise DistlibException('not a directory: %r' % doc_dir)
fn = os.path.join(doc_dir, 'index.html')
if not os.path.exists(fn):
raise DistlibException('not found: %r' % fn)
metadata.validate()
name, version = metadata.name, metadata.version
zip_data = zip_dir(doc_dir).getvalue()
fields = [(':action', 'doc_upload'),
('name', name), ('version', version)]
files = [('content', name, zip_data)]
request = self.encode_request(fields, files)
return self.send_request(request)
def get_verify_command(self, signature_filename, data_filename,
keystore=None):
"""
Return a suitable command for verifying a file.
:param signature_filename: The pathname to the file containing the
signature.
:param data_filename: The pathname to the file containing the
signed data.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: The verifying command as a list suitable to be
passed to :class:`subprocess.Popen`.
"""
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
if keystore is None:
keystore = self.gpg_home
if keystore:
cmd.extend(['--homedir', keystore])
cmd.extend(['--verify', signature_filename, data_filename])
logger.debug('invoking: %s', ' '.join(cmd))
return cmd
def verify_signature(self, signature_filename, data_filename,
keystore=None):
"""
Verify a signature for a file.
:param signature_filename: The pathname to the file containing the
signature.
:param data_filename: The pathname to the file containing the
signed data.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: True if the signature was verified, else False.
"""
if not self.gpg:
raise DistlibException('verification unavailable because gpg '
'unavailable')
cmd = self.get_verify_command(signature_filename, data_filename,
keystore)
rc, stdout, stderr = self.run_command(cmd)
if rc not in (0, 1):
raise DistlibException('verify command failed with error '
'code %s' % rc)
return rc == 0
def download_file(self, url, destfile, digest=None, reporthook=None):
"""
This is a convenience method for downloading a file from an URL.
Normally, this will be a file from the index, though currently
no check is made for this (i.e. a file can be downloaded from
anywhere).
The method is just like the :func:`urlretrieve` function in the
standard library, except that it allows digest computation to be
done during download and checking that the downloaded data
matched any expected value.
:param url: The URL of the file to be downloaded (assumed to be
available via an HTTP GET request).
:param destfile: The pathname where the downloaded file is to be
saved.
:param digest: If specified, this must be a (hasher, value)
tuple, where hasher is the algorithm used (e.g.
``'md5'``) and ``value`` is the expected value.
:param reporthook: The same as for :func:`urlretrieve` in the
standard library.
"""
if digest is None:
digester = None
logger.debug('No digest specified')
else:
if isinstance(digest, (list, tuple)):
hasher, digest = digest
else:
hasher = 'md5'
digester = getattr(hashlib, hasher)()
logger.debug('Digest specified: %s' % digest)
# The following code is equivalent to urlretrieve.
# We need to do it this way so that we can compute the
# digest of the file as we go.
with open(destfile, 'wb') as dfp:
# addinfourl is not a context manager on 2.x
# so we have to use try/finally
sfp = self.send_request(Request(url))
try:
headers = sfp.info()
blocksize = 8192
size = -1
read = 0
blocknum = 0
if "content-length" in headers:
size = int(headers["Content-Length"])
if reporthook:
reporthook(blocknum, blocksize, size)
while True:
block = sfp.read(blocksize)
if not block:
break
read += len(block)
dfp.write(block)
if digester:
digester.update(block)
blocknum += 1
if reporthook:
reporthook(blocknum, blocksize, size)
finally:
sfp.close()
# check that we got the whole file, if we can
if size >= 0 and read < size:
raise DistlibException(
'retrieval incomplete: got only %d out of %d bytes'
% (read, size))
# if we have a digest, it must match.
if digester:
actual = digester.hexdigest()
if digest != actual:
raise DistlibException('%s digest mismatch for %s: expected '
'%s, got %s' % (hasher, destfile,
digest, actual))
logger.debug('Digest verified: %s', digest)
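    # Illustrative call (hypothetical URL and digest value):
    #
    #   index.download_file('https://example.com/project-1.0.tar.gz',
    #                       '/tmp/project-1.0.tar.gz',
    #                       digest=('sha256', expected_sha256_hex))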
def send_request(self, req):
"""
Send a standard library :class:`Request` to PyPI and return its
response.
:param req: The request to send.
:return: The HTTP response from PyPI (a standard library HTTPResponse).
"""
handlers = []
if self.password_handler:
handlers.append(self.password_handler)
if self.ssl_verifier:
handlers.append(self.ssl_verifier)
opener = build_opener(*handlers)
return opener.open(req)
def encode_request(self, fields, files):
"""
Encode fields and files for posting to an HTTP server.
:param fields: The fields to send as a list of (fieldname, value)
tuples.
:param files: The files to send as a list of (fieldname, filename,
                      file_bytes) tuples.
"""
# Adapted from packaging, which in turn was adapted from
# http://code.activestate.com/recipes/146306
parts = []
boundary = self.boundary
for k, values in fields:
if not isinstance(values, (list, tuple)):
values = [values]
for v in values:
parts.extend((
b'--' + boundary,
('Content-Disposition: form-data; name="%s"' %
k).encode('utf-8'),
b'',
v.encode('utf-8')))
for key, filename, value in files:
parts.extend((
b'--' + boundary,
('Content-Disposition: form-data; name="%s"; filename="%s"' %
(key, filename)).encode('utf-8'),
b'',
value))
parts.extend((b'--' + boundary + b'--', b''))
body = b'\r\n'.join(parts)
ct = b'multipart/form-data; boundary=' + boundary
headers = {
'Content-type': ct,
'Content-length': str(len(body))
}
return Request(self.url, body, headers)
def search(self, terms, operator=None):
if isinstance(terms, string_types):
terms = {'name': terms}
if self.rpc_proxy is None:
self.rpc_proxy = ServerProxy(self.url, timeout=3.0)
return self.rpc_proxy.search(terms, operator or 'and')
| mit | 3,313,144,646,176,847,400 | 39.888889 | 79 | 0.545195 | false |
coders4help/volunteer_planner | shiftmailer/excelexport.py | 5 | 2641 | # coding: utf-8
import logging
import os
import tempfile
from django.conf import settings
from django.core.mail.message import EmailMessage
from excel_renderer import ExcelRenderer
log = logging.getLogger(__name__)
class GenerateExcelSheet:
def __init__(self, shifts, mailer):
if not shifts:
raise AssertionError(u'No shifts given. Cannot generate Excel file for {}.'.format(mailer.facility))
self.shifts = shifts
self.mailer = mailer
self.tmpdir = tempfile.mkdtemp()
self.tmpfiles = []
def __del__(self):
for filename in self.tmpfiles:
if filename and os.path.exists(filename):
os.remove(filename)
if self.tmpdir is not None and os.path.exists(self.tmpdir):
log.debug(u'Removing tmpdir %s', self.tmpdir)
os.removedirs(self.tmpdir)
def generate_excel(self):
if self.shifts is None or self.shifts.count() == 0:
            log.warn(u'No shifts, no shift schedule.')
return None
log.debug(u'About to generate XLS for facility "%s"', self.shifts[0].facility)
log.debug(u'Shifts query: %s', self.shifts.query)
filename = os.path.join(self.tmpdir, u'Dienstplan_{}_{}.xls'.format(self.mailer.organization,
self.shifts[0].starting_time
.strftime('%Y%m%d')))
self.tmpfiles.append(filename)
renderer = ExcelRenderer()
renderer.generate_shift_overview(self.mailer.organization, self.mailer.facility, self.shifts, filename)
return filename
def send_file(self):
attachment = self.generate_excel()
if not self.mailer:
log.error(u'Cannot create and send email without mailer information')
return
mail = EmailMessage()
mail.body = "Hallo " + self.mailer.first_name + " " + self.mailer.last_name + "\n\n"\
"Anbei die Liste zum Dienstplan der Freiwilligen.\nDies ist ein Service von volunteer-planner.org"
mail.subject = "Dienstplan fuer den " + self.shifts[0].starting_time.strftime("%d.%m.%Y") + \
" der Freiwilligen in der Unterkunft " + self.shifts[0].facility.name
mail.from_email = settings.DEFAULT_FROM_EMAIL
mail.to = [str(self.mailer.email)]
if attachment is not None:
mail.attach_file(path=attachment, mimetype='application/vnd.ms-excel')
mail.send()
else:
            log.warn(u'No attachment, no mail sent.')
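# Illustrative usage (hypothetical call site):
#
#   # `mailer` must provide organization, facility, first_name, last_name and
#   # email attributes; `shifts` is the queryset of shifts to include.
#   GenerateExcelSheet(shifts=shifts, mailer=mailer).send_file()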
| agpl-3.0 | 5,214,641,586,995,500,000 | 39.015152 | 118 | 0.595229 | false |
ryanjmccall/nupic | nupic/frameworks/opf/expdescriptionhelpers.py | 7 | 15073 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import os
import imp
from nupic.data.dictutils import rUpdate
# This file contains utility functions that are used
# internally by the prediction framework and may be imported
# by description files. Functions that are used only by
# the prediction framework should be in utils.py
#
# This file provides support for the following experiment description features:
#
# 1. Sub-experiment support
# 2. Lazy evaluators (e.g., DeferredDictLookup, applyValueGettersToContainer)
###############################################################################
# Sub-experiment support
###############################################################################
#########################################################
# Utility methods for description files. Description files are organized as a
# base description and an experiment based on that base description.
# The base description calls getConfig to get the configuration from the
# specific experiment, and the specific experiment calls importBaseDescription.
# An empty initial config allows the base experiment to run by itself.
_config = dict()
# Save the path to the current sub-experiment here during importBaseDescription()
subExpDir = None
# We will load the description file as a module, which allows us to
# use the debugger and see source code. But description files are frequently
# modified and we want to be able to easily reload them. To facilitate this,
# we reload with a unique module name ("pf_description%d") each time.
baseDescriptionImportCount = 0
#########################################################
def importBaseDescription(path, config):
global baseDescriptionImportCount, _config, subExpDir
if not os.path.isabs(path):
# grab the path to the file doing the import
import inspect
callingFrame = inspect.stack()[1][0]
callingFile = callingFrame.f_globals['__file__']
subExpDir = os.path.dirname(callingFile)
path = os.path.join(subExpDir, path)
#print "Importing from: %s" % path
# stash the config in a place where the loading module can find it.
_config = config
mod = imp.load_source("pf_base_description%d" % baseDescriptionImportCount,
path)
# don't want to override __file__ in our caller
mod.__base_file__ = mod.__file__
del mod.__file__
baseDescriptionImportCount += 1
return mod
#########################################################
# Newer method just updates from sub-experiment
def updateConfigFromSubConfig(config):
# _config is the configuration provided by the sub-experiment
global _config
rUpdate(config, _config)
_config = dict()
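# Illustrative sketch of the sub-experiment pattern (file names and field
# values are hypothetical). A sub-experiment's description.py overrides
# selected fields and imports the base description, which merges them in:
#
#   # sub-experiment description.py
#   config = dict(_dsEncoderFieldName2_N=100)
#   mod = importBaseDescription('../description.py', config)
#   locals().update(mod.__dict__)
#
#   # base description.py
#   config = dict(_dsEncoderFieldName2_N=70, _dsEncoderFieldName2_W=5)
#   updateConfigFromSubConfig(config)
#   applyValueGettersToContainer(config)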
#########################################################
def getSubExpDir():
global subExpDir
return subExpDir
###############################################################################
# Lazy evaluators (DeferredDictLookup, applyValueGettersToContainer, and friends)
###############################################################################
###############################################################################
class ValueGetterBase(object):
""" Base class for "value getters" (e.g., class DictValueGetter) that are used
to resolve values of sub-fields after the experiment's config dictionary (in
description.py) is defined and possibly updated from a sub-experiment.
This solves the problem of referencing the config dictionary's field from within
  the definition of the dictionary itself (before the dictionary's own definition
is complete).
  NOTE: it's possible that the referenced value does not yet exist at the
time of instantiation of a given value-getter future. It will be
resolved when the base description.py calls
applyValueGettersToContainer().
NOTE: The constructor of the derived classes MUST call our constructor.
NOTE: The derived classes MUST override handleGetValue(self).
NOTE: may be used by base and sub-experiments to derive their own custom value
        getters; however, their use is applicable only where permitted, as
described in comments within descriptionTemplate.tpl. See class
DictValueGetter for implementation example.
"""
class __NoResult(object):
""" A private class that we use as a special unique value to indicate that
our result cache instance variable does not hold a valid result.
"""
pass
def __init__(self):
#print("NOTE: ValueGetterBase INITIALIZING")
self.__inLookup = False
self.__cachedResult = self.__NoResult
def __call__(self, topContainer):
""" Resolves the referenced value. If the result is already cached,
returns it to caller. Otherwise, invokes the pure virtual method
handleGetValue. If handleGetValue() returns another value-getter, calls
that value-getter to resolve the value. This may result in a chain of calls
that terminates once the value is fully resolved to a non-value-getter value.
Upon return, the value is fully resolved and cached, so subsequent calls will
always return the cached value reference.
topContainer: The top-level container (dict, tuple, or list [sub-]instance)
within whose context the value-getter is applied.
Returns: The fully-resolved value that was referenced by the value-getter
instance
"""
#print("IN ValueGetterBase.__CAll__()")
assert(not self.__inLookup)
if self.__cachedResult is not self.__NoResult:
return self.__cachedResult
self.__cachedResult = self.handleGetValue(topContainer)
if isinstance(self.__cachedResult, ValueGetterBase):
valueGetter = self.__cachedResult
self.__inLookup = True
self.__cachedResult = valueGetter(topContainer)
self.__inLookup = False
    # The value should be fully resolved at this point
assert(self.__cachedResult is not self.__NoResult)
assert(not isinstance(self.__cachedResult, ValueGetterBase))
return self.__cachedResult
def handleGetValue(self, topContainer):
""" A "pure virtual" method. The derived class MUST override this method
and return the referenced value. The derived class is NOT responsible for
fully resolving the reference'd value in the event the value resolves to
another ValueGetterBase-based instance -- this is handled automatically
within ValueGetterBase implementation.
topContainer: The top-level container (dict, tuple, or list [sub-]instance)
within whose context the value-getter is applied.
Returns: The value referenced by this instance (which may be another
value-getter instance)
"""
raise NotImplementedError("ERROR: ValueGetterBase is an abstract " + \
"class; base class MUST override handleGetValue()")
###############################################################################
class DictValueGetter(ValueGetterBase):
"""
Creates a "future" reference to a value within a top-level or a nested
dictionary. See also class DeferredDictLookup.
"""
def __init__(self, referenceDict, *dictKeyChain):
"""
referenceDict: Explicit reference dictionary that contains the field
corresonding to the first key name in dictKeyChain. This may
be the result returned by the built-in globals() function,
when we desire to look up a dictionary value from a dictionary
referenced by a global variable within the calling module.
If None is passed for referenceDict, then the topContainer
parameter supplied to handleGetValue() will be used as the
reference dictionary instead (this allows the desired module
to designate the appropriate reference dictionary for the
value-getters when it calls applyValueGettersToContainer())
dictKeyChain: One or more strings; the first string is a key (that will
eventually be defined) in the reference dictionary. If
additional strings are supplied, then the values
correspnding to prior key strings must be dictionaries, and
each additionl string references a sub-dictionary of the
former. The final string is the key of the field whose value
will be returned by handleGetValue().
    NOTE: It's possible that the referenced value does not yet exist at the
time of instantiation of this class. It will be resolved when the
base description.py calls applyValueGettersToContainer().
Example:
config = dict(
_dsEncoderFieldName2_N = 70,
_dsEncoderFieldName2_W = 5,
dsEncoderSchema = [
dict(
base=dict(
fieldname='Name2', type='ScalarEncoder',
name='Name2', minval=0, maxval=270, clipInput=True,
n=DictValueGetter(None, '_dsEncoderFieldName2_N'),
w=DictValueGetter(None, '_dsEncoderFieldName2_W')),
),
],
)
updateConfigFromSubConfig(config)
applyValueGettersToContainer(config)
"""
# First, invoke base constructor
ValueGetterBase.__init__(self)
assert(referenceDict is None or isinstance(referenceDict, dict))
assert(len(dictKeyChain) >= 1)
self.__referenceDict = referenceDict
self.__dictKeyChain = dictKeyChain
def handleGetValue(self, topContainer):
""" This method overrides ValueGetterBase's "pure virtual" method. It
returns the referenced value. The derived class is NOT responsible for
fully resolving the reference'd value in the event the value resolves to
another ValueGetterBase-based instance -- this is handled automatically
within ValueGetterBase implementation.
topContainer: The top-level container (dict, tuple, or list [sub-]instance)
within whose context the value-getter is applied. If
self.__referenceDict is None, then topContainer will be used
as the reference dictionary for resolving our dictionary key
chain.
Returns: The value referenced by this instance (which may be another
value-getter instance)
"""
value = self.__referenceDict if self.__referenceDict is not None else topContainer
for key in self.__dictKeyChain:
value = value[key]
return value
###############################################################################
class DeferredDictLookup(DictValueGetter):
"""
Creates a "future" reference to a value within an implicit dictionary that
will be passed to applyValueGettersToContainer() in the future (typically
called by description.py after its config dictionary has been updated from
the sub-experiment). The reference is relative to the dictionary that will
be passed to applyValueGettersToContainer()
"""
def __init__(self, *dictKeyChain):
"""
dictKeyChain: One or more strings; the first string is a key (that will
eventually be defined) in the dictionary that will be passed
to applyValueGettersToContainer(). If additional strings are
                      supplied, then the values corresponding to prior key strings
                      must be dictionaries, and each additional string references a
sub-dictionary of the former. The final string is the key of
the field whose value will be returned by this value-getter
    NOTE: It's possible that the referenced value does not yet exist at the
time of instantiation of this class. It will be resolved when the
base description.py calls applyValueGettersToContainer().
Example:
config = dict(
_dsEncoderFieldName2_N = 70,
_dsEncoderFieldName2_W = 5,
dsEncoderSchema = [
dict(
base=dict(
fieldname='Name2', type='ScalarEncoder',
name='Name2', minval=0, maxval=270, clipInput=True,
n=DeferredDictLookup('_dsEncoderFieldName2_N'),
w=DeferredDictLookup('_dsEncoderFieldName2_W')),
),
],
)
updateConfigFromSubConfig(config)
applyValueGettersToContainer(config)
"""
# Invoke base (DictValueGetter constructor), passing None for referenceDict,
# which will force it use the dictionary passed via
# applyValueGettersToContainer(), instead.
DictValueGetter.__init__(self, None, *dictKeyChain)
###############################################################################
def applyValueGettersToContainer(container):
"""
"""
_applyValueGettersImpl(container=container, currentObj=container,
recursionStack=[])
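###############################################################################
# Illustrative sketch (not part of the original API surface): a minimal,
# self-contained demonstration of how a DeferredDictLookup placeholder is
# resolved by applyValueGettersToContainer(). The key names used below are
# invented purely for illustration.
def _demoApplyValueGetters():
  config = dict(
    _nEncoderBits = 21,
    encoderParams = dict(
      n = DeferredDictLookup('_nEncoderBits'),
    ),
  )
  # Before this call, config['encoderParams']['n'] is a DeferredDictLookup
  # instance; afterwards it has been replaced by the integer 21.
  applyValueGettersToContainer(config)
  assert config['encoderParams']['n'] == 21
  return config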
###############################################################################
def _applyValueGettersImpl(container, currentObj, recursionStack):
"""
"""
# Detect cycles
if currentObj in recursionStack:
return
# Sanity-check of our cycle-detection logic
assert(len(recursionStack) < 1000)
# Push the current object on our cycle-detection stack
recursionStack.append(currentObj)
# Resolve value-getters within dictionaries, tuples and lists
if isinstance(currentObj, dict):
for (key, value) in currentObj.items():
if isinstance(value, ValueGetterBase):
currentObj[key] = value(container)
_applyValueGettersImpl(container, currentObj[key], recursionStack)
elif isinstance(currentObj, tuple) or isinstance(currentObj, list):
for (i, value) in enumerate(currentObj):
      # NOTE: top-level values within a tuple should never be value-getters,
      #  since the top-level elements of a tuple are immutable. However, we
      #  still recurse into each element, because nested sub-elements (e.g.,
      #  dicts or lists inside the tuple) may be mutable and may themselves
      #  contain value-getters.
if isinstance(value, ValueGetterBase):
currentObj[i] = value(container)
_applyValueGettersImpl(container, currentObj[i], recursionStack)
else:
pass
recursionStack.pop()
return
| gpl-3.0 | -4,748,827,907,297,400,000 | 37.748072 | 86 | 0.645525 | false |
SeattleTestbed/repy_v2 | safe_check.py | 1 | 1136 | """
Author: Armon Dadgar
Date: June 11th, 2009
Description:
This simple script reads "code" from stdin, and runs safe.safe_check() on it.
The resulting return value or exception is serialized and written to stdout.
The purpose of this script is to be called from the main repy.py script so that the
  memory used by safe.safe_check() will be reclaimed when this process quits.
"""
import safe
import sys
import encoding_header
if __name__ == "__main__":
# Get the user "code"
usercode = sys.stdin.read()
# Output buffer
output = ""
# Check the code
try:
value = safe.safe_check(usercode)
output += str(value)
except Exception, e:
# Adjust traceback line numbers, see SeattleTestbed/repy_v2#95.
try:
e.lineno = e.lineno - \
len(encoding_header.ENCODING_DECLARATION.splitlines())
except (TypeError, AttributeError):
# Ignore exceptions with line numbers that are non-numeric (i.e.
# `None`), or have no `lineno` attribute altogether.
pass
output += str(type(e)) + " " + str(e)
# Write out
sys.stdout.write(output)
sys.stdout.flush()
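# Illustrative sketch (not part of the original repy code): one way the parent
# repy.py process might invoke this checker in a separate Python process so the
# memory used by safe.safe_check() is reclaimed when that process exits. The
# helper name and the subprocess-based invocation are assumptions made purely
# for illustration.
def _example_invoke_safety_check(usercode):
  import subprocess
  proc = subprocess.Popen([sys.executable, "safe_check.py"],
      stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  (rawoutput, _) = proc.communicate(usercode)
  # On success this is the serialized return value of safe.safe_check();
  # otherwise it is the serialized exception type and message written above.
  return rawoutput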
| mit | -6,990,183,952,512,233,000 | 25.418605 | 85 | 0.669894 | false |
ViaQ/watches-cli | tests/commands/test_just_indices_stats.py | 1 | 1384 | """Tests for our `watches just_indices_stats` subcommand."""
import json
from subprocess import PIPE, Popen as popen
from secure_support import TestSecureSupport
class TestJustIndicesStats(TestSecureSupport):
def test_returns_index_per_line(self):
cmd = self.appendSecurityCommands(['watches', 'just_indices_stats', '-l', '--level=indices', '--timestamp'])
output = popen(cmd, stdout=PIPE).communicate()[0].decode('ascii')
self.assertTrue(len(output) > 0)
lines = 0
for line in output.splitlines():
lines += 1
o = json.loads(line)
self.assertTrue('index' in o)
self.assertTrue('timestamp' in o)
self.assertTrue('total' in o)
self.assertTrue('primaries' in o)
self.assertTrue(lines > 0)
def test_returns_index_per_line_just__all(self):
cmd = self.appendSecurityCommands(['watches', 'just_indices_stats', '-l'])
output = popen(cmd, stdout=PIPE).communicate()[0].decode('ascii')
self.assertTrue(len(output) > 0)
lines = 0
for line in output.splitlines():
lines += 1
o = json.loads(line)
self.assertTrue('index' in o)
self.assertTrue(o['index'] == '_all')
# Without specifying --level=indices we get only _all index stats
self.assertTrue(lines == 1)
| apache-2.0 | -9,034,204,081,982,815,000 | 37.444444 | 116 | 0.608382 | false |
tomekwojcik/Team-O-Matic | teamomatic/app.py | 1 | 2057 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2011 by Tomasz Wójcik <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Application factory."""
import tornado.web
import tornado.locale
import teamomatic.handlers
import logging
import os
# Routing table.
routes = [
(r'/', teamomatic.handlers.IndexHandler)
]
def create_app(config=None):
"""Instantiates and initializes the app according to config dict."""
if config == None:
raise RuntimeError('No configuration given.')
config['static_path'] = os.path.join(os.path.dirname(__file__), 'static')
tornado.locale.load_translations(os.path.join(os.path.dirname(__file__), 'translations'))
logging_config = {
'format': "%(asctime)s %(name)s <%(levelname)s>: %(message)s",
'level': logging.INFO
}
if config.get('debug', False):
logging_config['level'] = logging.DEBUG
logging.basicConfig(**logging_config)
app = tornado.web.Application(routes, **config)
return app | mit | -5,744,565,955,526,264,000 | 35.732143 | 93 | 0.711089 | false |
zhulin2609/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/bot/commitqueuetask_unittest.py | 119 | 29255 | # Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from datetime import datetime
import logging
import unittest2 as unittest
from webkitpy.common.net import bugzilla
from webkitpy.common.net.layouttestresults import LayoutTestResults
from webkitpy.common.system.executive import ScriptError
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.layout_tests.models import test_results
from webkitpy.layout_tests.models import test_failures
from webkitpy.thirdparty.mock import Mock
from webkitpy.tool.bot.commitqueuetask import *
from webkitpy.tool.bot.expectedfailures import ExpectedFailures
from webkitpy.tool.mocktool import MockTool
_log = logging.getLogger(__name__)
class MockCommitQueue(CommitQueueTaskDelegate):
def __init__(self, error_plan):
self._error_plan = error_plan
self._failure_status_id = 0
def run_command(self, command):
_log.info("run_webkit_patch: %s" % command)
if self._error_plan:
error = self._error_plan.pop(0)
if error:
raise error
def command_passed(self, success_message, patch):
_log.info("command_passed: success_message='%s' patch='%s'" % (
success_message, patch.id()))
def command_failed(self, failure_message, script_error, patch):
_log.info("command_failed: failure_message='%s' script_error='%s' patch='%s'" % (
failure_message, script_error, patch.id()))
self._failure_status_id += 1
return self._failure_status_id
def refetch_patch(self, patch):
return patch
def expected_failures(self):
return ExpectedFailures()
def test_results(self):
return None
def report_flaky_tests(self, patch, flaky_results, results_archive):
flaky_tests = [result.filename for result in flaky_results]
_log.info("report_flaky_tests: patch='%s' flaky_tests='%s' archive='%s'" % (patch.id(), flaky_tests, results_archive.filename))
def archive_last_test_results(self, patch):
_log.info("archive_last_test_results: patch='%s'" % patch.id())
archive = Mock()
archive.filename = "mock-archive-%s.zip" % patch.id()
return archive
def build_style(self):
return "both"
def did_pass_testing_ews(self, patch):
return False
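# Illustrative note (not part of the original test suite): each entry in a
# MockCommitQueue error_plan corresponds to one run_command() call made by
# CommitQueueTask, in order. A None entry lets that step succeed, while a
# ScriptError entry makes that step raise. For example, a delegate built as
#   MockCommitQueue([None, None, ScriptError("MOCK apply failure")])
# would pass the 'clean' and 'update' steps and then fail on 'apply-attachment'.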
class FailingTestCommitQueue(MockCommitQueue):
def __init__(self, error_plan, test_failure_plan):
MockCommitQueue.__init__(self, error_plan)
self._test_run_counter = -1 # Special value to indicate tests have never been run.
self._test_failure_plan = test_failure_plan
def run_command(self, command):
if command[0] == "build-and-test":
self._test_run_counter += 1
MockCommitQueue.run_command(self, command)
def _mock_test_result(self, testname):
return test_results.TestResult(testname, [test_failures.FailureTextMismatch()])
def test_results(self):
# Doesn't make sense to ask for the test_results until the tests have run at least once.
assert(self._test_run_counter >= 0)
failures_for_run = self._test_failure_plan[self._test_run_counter]
results = LayoutTestResults(map(self._mock_test_result, failures_for_run))
# This makes the results trustable by ExpectedFailures.
results.set_failure_limit_count(10)
return results
# We use GoldenScriptError to make sure that the code under test throws the
# correct (i.e., golden) exception.
class GoldenScriptError(ScriptError):
pass
class CommitQueueTaskTest(unittest.TestCase):
def _run_through_task(self, commit_queue, expected_logs, expected_exception=None, expect_retry=False):
self.maxDiff = None
tool = MockTool(log_executive=True)
patch = tool.bugs.fetch_attachment(10000)
task = CommitQueueTask(commit_queue, patch)
success = OutputCapture().assert_outputs(self, task.run, expected_logs=expected_logs, expected_exception=expected_exception)
if not expected_exception:
self.assertEqual(success, not expect_retry)
return task
def test_success_case(self):
commit_queue = MockCommitQueue([])
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_passed: success_message='Passed tests' patch='10000'
run_webkit_patch: ['land-attachment', '--force-clean', '--non-interactive', '--parent-command=commit-queue', 10000]
command_passed: success_message='Landed patch' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs)
def test_fast_success_case(self):
commit_queue = MockCommitQueue([])
commit_queue.did_pass_testing_ews = lambda patch: True
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['land-attachment', '--force-clean', '--non-interactive', '--parent-command=commit-queue', 10000]
command_passed: success_message='Landed patch' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs)
def test_clean_failure(self):
commit_queue = MockCommitQueue([
ScriptError("MOCK clean failure"),
])
expected_logs = """run_webkit_patch: ['clean']
command_failed: failure_message='Unable to clean working directory' script_error='MOCK clean failure' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs, expect_retry=True)
def test_update_failure(self):
commit_queue = MockCommitQueue([
None,
ScriptError("MOCK update failure"),
])
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_failed: failure_message='Unable to update working directory' script_error='MOCK update failure' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs, expect_retry=True)
def test_apply_failure(self):
commit_queue = MockCommitQueue([
None,
None,
GoldenScriptError("MOCK apply failure"),
])
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_failed: failure_message='Patch does not apply' script_error='MOCK apply failure' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs, GoldenScriptError)
def test_validate_changelog_failure(self):
commit_queue = MockCommitQueue([
None,
None,
None,
GoldenScriptError("MOCK validate failure"),
])
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_failed: failure_message='ChangeLog did not pass validation' script_error='MOCK validate failure' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs, GoldenScriptError)
def test_build_failure(self):
commit_queue = MockCommitQueue([
None,
None,
None,
None,
GoldenScriptError("MOCK build failure"),
])
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_failed: failure_message='Patch does not build' script_error='MOCK build failure' patch='10000'
run_webkit_patch: ['build', '--force-clean', '--no-update', '--build-style=both']
command_passed: success_message='Able to build without patch' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs, GoldenScriptError)
def test_red_build_failure(self):
commit_queue = MockCommitQueue([
None,
None,
None,
None,
ScriptError("MOCK build failure"),
ScriptError("MOCK clean build failure"),
])
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_failed: failure_message='Patch does not build' script_error='MOCK build failure' patch='10000'
run_webkit_patch: ['build', '--force-clean', '--no-update', '--build-style=both']
command_failed: failure_message='Unable to build without patch' script_error='MOCK clean build failure' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs, expect_retry=True)
def test_flaky_test_failure(self):
commit_queue = MockCommitQueue([
None,
None,
None,
None,
None,
ScriptError("MOCK tests failure"),
])
# CommitQueueTask will only report flaky tests if we successfully parsed
# results.json and returned a LayoutTestResults object, so we fake one.
commit_queue.test_results = lambda: LayoutTestResults([])
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK tests failure' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_passed: success_message='Passed tests' patch='10000'
report_flaky_tests: patch='10000' flaky_tests='[]' archive='mock-archive-10000.zip'
run_webkit_patch: ['land-attachment', '--force-clean', '--non-interactive', '--parent-command=commit-queue', 10000]
command_passed: success_message='Landed patch' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs)
def test_failed_archive(self):
commit_queue = MockCommitQueue([
None,
None,
None,
None,
None,
ScriptError("MOCK tests failure"),
])
commit_queue.test_results = lambda: LayoutTestResults([])
        # It's possible for the delegate to fail to archive layout tests; don't try to report
# flaky tests when that happens.
commit_queue.archive_last_test_results = lambda patch: None
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK tests failure' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_passed: success_message='Passed tests' patch='10000'
run_webkit_patch: ['land-attachment', '--force-clean', '--non-interactive', '--parent-command=commit-queue', 10000]
command_passed: success_message='Landed patch' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs)
def test_double_flaky_test_failure(self):
commit_queue = FailingTestCommitQueue([
None,
None,
None,
None,
None,
ScriptError("MOCK test failure"),
ScriptError("MOCK test failure again"),
], [
"foo.html",
"bar.html",
"foo.html",
])
# The (subtle) point of this test is that report_flaky_tests does not appear
# in the expected_logs for this run.
# Note also that there is no attempt to run the tests w/o the patch.
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure again' patch='10000'
"""
tool = MockTool(log_executive=True)
patch = tool.bugs.fetch_attachment(10000)
task = CommitQueueTask(commit_queue, patch)
success = OutputCapture().assert_outputs(self, task.run, expected_logs=expected_logs)
self.assertFalse(success)
def test_test_failure(self):
commit_queue = MockCommitQueue([
None,
None,
None,
None,
None,
GoldenScriptError("MOCK test failure"),
ScriptError("MOCK test failure again"),
])
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure again' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--force-clean', '--no-update', '--build', '--test', '--non-interactive']
command_passed: success_message='Able to pass tests without patch' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs, GoldenScriptError)
def test_red_test_failure(self):
commit_queue = FailingTestCommitQueue([
None,
None,
None,
None,
None,
ScriptError("MOCK test failure"),
ScriptError("MOCK test failure again"),
ScriptError("MOCK clean test failure"),
], [
"foo.html",
"foo.html",
"foo.html",
])
# Tests always fail, and always return the same results, but we
# should still be able to land in this case!
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure again' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--force-clean', '--no-update', '--build', '--test', '--non-interactive']
command_failed: failure_message='Unable to pass tests without patch (tree is red?)' script_error='MOCK clean test failure' patch='10000'
run_webkit_patch: ['land-attachment', '--force-clean', '--non-interactive', '--parent-command=commit-queue', 10000]
command_passed: success_message='Landed patch' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs)
def test_very_red_tree_retry(self):
lots_of_failing_tests = map(lambda num: "test-%s.html" % num, range(0, 100))
commit_queue = FailingTestCommitQueue([
None,
None,
None,
None,
None,
ScriptError("MOCK test failure"),
ScriptError("MOCK test failure again"),
ScriptError("MOCK clean test failure"),
], [
lots_of_failing_tests,
lots_of_failing_tests,
lots_of_failing_tests,
])
# Tests always fail, and return so many failures that we do not
# trust the results (see ExpectedFailures._can_trust_results) so we
# just give up and retry the patch.
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure again' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--force-clean', '--no-update', '--build', '--test', '--non-interactive']
command_failed: failure_message='Unable to pass tests without patch (tree is red?)' script_error='MOCK clean test failure' patch='10000'
"""
self._run_through_task(commit_queue, expected_logs, expect_retry=True)
def test_red_tree_patch_rejection(self):
commit_queue = FailingTestCommitQueue([
None,
None,
None,
None,
None,
GoldenScriptError("MOCK test failure"),
ScriptError("MOCK test failure again"),
ScriptError("MOCK clean test failure"),
], [
["foo.html", "bar.html"],
["foo.html", "bar.html"],
["foo.html"],
])
# Tests always fail, but the clean tree only fails one test
# while the patch fails two. So we should reject the patch!
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_failed: failure_message='Patch does not pass tests' script_error='MOCK test failure again' patch='10000'
archive_last_test_results: patch='10000'
run_webkit_patch: ['build-and-test', '--force-clean', '--no-update', '--build', '--test', '--non-interactive']
command_failed: failure_message='Unable to pass tests without patch (tree is red?)' script_error='MOCK clean test failure' patch='10000'
"""
task = self._run_through_task(commit_queue, expected_logs, GoldenScriptError)
self.assertEqual(task.results_from_patch_test_run(task._patch).failing_tests(), ["foo.html", "bar.html"])
# failure_status_id should be of the test with patch (1), not the test without patch (2).
self.assertEqual(task.failure_status_id, 1)
def test_land_failure(self):
commit_queue = MockCommitQueue([
None,
None,
None,
None,
None,
None,
GoldenScriptError("MOCK land failure"),
])
expected_logs = """run_webkit_patch: ['clean']
command_passed: success_message='Cleaned working directory' patch='10000'
run_webkit_patch: ['update']
command_passed: success_message='Updated working directory' patch='10000'
run_webkit_patch: ['apply-attachment', '--no-update', '--non-interactive', 10000]
command_passed: success_message='Applied patch' patch='10000'
run_webkit_patch: ['validate-changelog', '--check-oops', '--non-interactive', 10000]
command_passed: success_message='ChangeLog validated' patch='10000'
run_webkit_patch: ['build', '--no-clean', '--no-update', '--build-style=both']
command_passed: success_message='Built patch' patch='10000'
run_webkit_patch: ['build-and-test', '--no-clean', '--no-update', '--test', '--non-interactive']
command_passed: success_message='Passed tests' patch='10000'
run_webkit_patch: ['land-attachment', '--force-clean', '--non-interactive', '--parent-command=commit-queue', 10000]
command_failed: failure_message='Unable to land patch' script_error='MOCK land failure' patch='10000'
"""
        # FIXME: This should really be expect_retry=True for a better user experience.
self._run_through_task(commit_queue, expected_logs, GoldenScriptError)
def _expect_validate(self, patch, is_valid):
class MockDelegate(object):
def refetch_patch(self, patch):
return patch
def expected_failures(self):
return ExpectedFailures()
task = CommitQueueTask(MockDelegate(), patch)
self.assertEqual(task.validate(), is_valid)
def _mock_patch(self, attachment_dict={}, bug_dict={'bug_status': 'NEW'}, committer="fake"):
bug = bugzilla.Bug(bug_dict, None)
patch = bugzilla.Attachment(attachment_dict, bug)
patch._committer = committer
return patch
def test_validate(self):
self._expect_validate(self._mock_patch(), True)
self._expect_validate(self._mock_patch({'is_obsolete': True}), False)
self._expect_validate(self._mock_patch(bug_dict={'bug_status': 'CLOSED'}), False)
self._expect_validate(self._mock_patch(committer=None), False)
self._expect_validate(self._mock_patch({'review': '-'}), False)
| bsd-3-clause | 3,934,182,110,898,596,400 | 49.180103 | 136 | 0.676773 | false |
blacklin/kbengine | kbe/res/scripts/common/Lib/encodings/cp864.py | 272 | 33663 | """ Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP864.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp864',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
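### Usage sketch (not part of the generated codec module): decoding and then
### re-encoding a few CP864 bytes by calling this module's Codec class
### directly, without registering it with the codecs machinery. The sample
### bytes 0xB0-0xB2 map to the Arabic-Indic digits zero through two in the
### tables below.
def _demo_roundtrip():
    codec = Codec()
    text, _ = codec.decode(b'\xb0\xb1\xb2')   # -> '\u0660\u0661\u0662'
    data, _ = codec.encode(text)              # back to b'\xb0\xb1\xb2'
    assert data == b'\xb0\xb1\xb2'
    return text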
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0025: 0x066a, # ARABIC PERCENT SIGN
0x0080: 0x00b0, # DEGREE SIGN
0x0081: 0x00b7, # MIDDLE DOT
0x0082: 0x2219, # BULLET OPERATOR
0x0083: 0x221a, # SQUARE ROOT
0x0084: 0x2592, # MEDIUM SHADE
0x0085: 0x2500, # FORMS LIGHT HORIZONTAL
0x0086: 0x2502, # FORMS LIGHT VERTICAL
0x0087: 0x253c, # FORMS LIGHT VERTICAL AND HORIZONTAL
0x0088: 0x2524, # FORMS LIGHT VERTICAL AND LEFT
0x0089: 0x252c, # FORMS LIGHT DOWN AND HORIZONTAL
0x008a: 0x251c, # FORMS LIGHT VERTICAL AND RIGHT
0x008b: 0x2534, # FORMS LIGHT UP AND HORIZONTAL
0x008c: 0x2510, # FORMS LIGHT DOWN AND LEFT
0x008d: 0x250c, # FORMS LIGHT DOWN AND RIGHT
0x008e: 0x2514, # FORMS LIGHT UP AND RIGHT
0x008f: 0x2518, # FORMS LIGHT UP AND LEFT
0x0090: 0x03b2, # GREEK SMALL BETA
0x0091: 0x221e, # INFINITY
0x0092: 0x03c6, # GREEK SMALL PHI
0x0093: 0x00b1, # PLUS-OR-MINUS SIGN
0x0094: 0x00bd, # FRACTION 1/2
0x0095: 0x00bc, # FRACTION 1/4
0x0096: 0x2248, # ALMOST EQUAL TO
0x0097: 0x00ab, # LEFT POINTING GUILLEMET
0x0098: 0x00bb, # RIGHT POINTING GUILLEMET
0x0099: 0xfef7, # ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE ISOLATED FORM
0x009a: 0xfef8, # ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE FINAL FORM
0x009b: None, # UNDEFINED
0x009c: None, # UNDEFINED
0x009d: 0xfefb, # ARABIC LIGATURE LAM WITH ALEF ISOLATED FORM
0x009e: 0xfefc, # ARABIC LIGATURE LAM WITH ALEF FINAL FORM
0x009f: None, # UNDEFINED
0x00a1: 0x00ad, # SOFT HYPHEN
0x00a2: 0xfe82, # ARABIC LETTER ALEF WITH MADDA ABOVE FINAL FORM
0x00a5: 0xfe84, # ARABIC LETTER ALEF WITH HAMZA ABOVE FINAL FORM
0x00a6: None, # UNDEFINED
0x00a7: None, # UNDEFINED
0x00a8: 0xfe8e, # ARABIC LETTER ALEF FINAL FORM
0x00a9: 0xfe8f, # ARABIC LETTER BEH ISOLATED FORM
0x00aa: 0xfe95, # ARABIC LETTER TEH ISOLATED FORM
0x00ab: 0xfe99, # ARABIC LETTER THEH ISOLATED FORM
0x00ac: 0x060c, # ARABIC COMMA
0x00ad: 0xfe9d, # ARABIC LETTER JEEM ISOLATED FORM
0x00ae: 0xfea1, # ARABIC LETTER HAH ISOLATED FORM
0x00af: 0xfea5, # ARABIC LETTER KHAH ISOLATED FORM
0x00b0: 0x0660, # ARABIC-INDIC DIGIT ZERO
0x00b1: 0x0661, # ARABIC-INDIC DIGIT ONE
0x00b2: 0x0662, # ARABIC-INDIC DIGIT TWO
0x00b3: 0x0663, # ARABIC-INDIC DIGIT THREE
0x00b4: 0x0664, # ARABIC-INDIC DIGIT FOUR
0x00b5: 0x0665, # ARABIC-INDIC DIGIT FIVE
0x00b6: 0x0666, # ARABIC-INDIC DIGIT SIX
0x00b7: 0x0667, # ARABIC-INDIC DIGIT SEVEN
0x00b8: 0x0668, # ARABIC-INDIC DIGIT EIGHT
0x00b9: 0x0669, # ARABIC-INDIC DIGIT NINE
0x00ba: 0xfed1, # ARABIC LETTER FEH ISOLATED FORM
0x00bb: 0x061b, # ARABIC SEMICOLON
0x00bc: 0xfeb1, # ARABIC LETTER SEEN ISOLATED FORM
0x00bd: 0xfeb5, # ARABIC LETTER SHEEN ISOLATED FORM
0x00be: 0xfeb9, # ARABIC LETTER SAD ISOLATED FORM
0x00bf: 0x061f, # ARABIC QUESTION MARK
0x00c0: 0x00a2, # CENT SIGN
0x00c1: 0xfe80, # ARABIC LETTER HAMZA ISOLATED FORM
0x00c2: 0xfe81, # ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM
0x00c3: 0xfe83, # ARABIC LETTER ALEF WITH HAMZA ABOVE ISOLATED FORM
0x00c4: 0xfe85, # ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM
0x00c5: 0xfeca, # ARABIC LETTER AIN FINAL FORM
0x00c6: 0xfe8b, # ARABIC LETTER YEH WITH HAMZA ABOVE INITIAL FORM
0x00c7: 0xfe8d, # ARABIC LETTER ALEF ISOLATED FORM
0x00c8: 0xfe91, # ARABIC LETTER BEH INITIAL FORM
0x00c9: 0xfe93, # ARABIC LETTER TEH MARBUTA ISOLATED FORM
0x00ca: 0xfe97, # ARABIC LETTER TEH INITIAL FORM
0x00cb: 0xfe9b, # ARABIC LETTER THEH INITIAL FORM
0x00cc: 0xfe9f, # ARABIC LETTER JEEM INITIAL FORM
0x00cd: 0xfea3, # ARABIC LETTER HAH INITIAL FORM
0x00ce: 0xfea7, # ARABIC LETTER KHAH INITIAL FORM
0x00cf: 0xfea9, # ARABIC LETTER DAL ISOLATED FORM
0x00d0: 0xfeab, # ARABIC LETTER THAL ISOLATED FORM
0x00d1: 0xfead, # ARABIC LETTER REH ISOLATED FORM
0x00d2: 0xfeaf, # ARABIC LETTER ZAIN ISOLATED FORM
0x00d3: 0xfeb3, # ARABIC LETTER SEEN INITIAL FORM
0x00d4: 0xfeb7, # ARABIC LETTER SHEEN INITIAL FORM
0x00d5: 0xfebb, # ARABIC LETTER SAD INITIAL FORM
0x00d6: 0xfebf, # ARABIC LETTER DAD INITIAL FORM
0x00d7: 0xfec1, # ARABIC LETTER TAH ISOLATED FORM
0x00d8: 0xfec5, # ARABIC LETTER ZAH ISOLATED FORM
0x00d9: 0xfecb, # ARABIC LETTER AIN INITIAL FORM
0x00da: 0xfecf, # ARABIC LETTER GHAIN INITIAL FORM
0x00db: 0x00a6, # BROKEN VERTICAL BAR
0x00dc: 0x00ac, # NOT SIGN
0x00dd: 0x00f7, # DIVISION SIGN
0x00de: 0x00d7, # MULTIPLICATION SIGN
0x00df: 0xfec9, # ARABIC LETTER AIN ISOLATED FORM
0x00e0: 0x0640, # ARABIC TATWEEL
0x00e1: 0xfed3, # ARABIC LETTER FEH INITIAL FORM
0x00e2: 0xfed7, # ARABIC LETTER QAF INITIAL FORM
0x00e3: 0xfedb, # ARABIC LETTER KAF INITIAL FORM
0x00e4: 0xfedf, # ARABIC LETTER LAM INITIAL FORM
0x00e5: 0xfee3, # ARABIC LETTER MEEM INITIAL FORM
0x00e6: 0xfee7, # ARABIC LETTER NOON INITIAL FORM
0x00e7: 0xfeeb, # ARABIC LETTER HEH INITIAL FORM
0x00e8: 0xfeed, # ARABIC LETTER WAW ISOLATED FORM
0x00e9: 0xfeef, # ARABIC LETTER ALEF MAKSURA ISOLATED FORM
0x00ea: 0xfef3, # ARABIC LETTER YEH INITIAL FORM
0x00eb: 0xfebd, # ARABIC LETTER DAD ISOLATED FORM
0x00ec: 0xfecc, # ARABIC LETTER AIN MEDIAL FORM
0x00ed: 0xfece, # ARABIC LETTER GHAIN FINAL FORM
0x00ee: 0xfecd, # ARABIC LETTER GHAIN ISOLATED FORM
0x00ef: 0xfee1, # ARABIC LETTER MEEM ISOLATED FORM
0x00f0: 0xfe7d, # ARABIC SHADDA MEDIAL FORM
0x00f1: 0x0651, # ARABIC SHADDAH
0x00f2: 0xfee5, # ARABIC LETTER NOON ISOLATED FORM
0x00f3: 0xfee9, # ARABIC LETTER HEH ISOLATED FORM
0x00f4: 0xfeec, # ARABIC LETTER HEH MEDIAL FORM
0x00f5: 0xfef0, # ARABIC LETTER ALEF MAKSURA FINAL FORM
0x00f6: 0xfef2, # ARABIC LETTER YEH FINAL FORM
0x00f7: 0xfed0, # ARABIC LETTER GHAIN MEDIAL FORM
0x00f8: 0xfed5, # ARABIC LETTER QAF ISOLATED FORM
0x00f9: 0xfef5, # ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE ISOLATED FORM
0x00fa: 0xfef6, # ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE FINAL FORM
0x00fb: 0xfedd, # ARABIC LETTER LAM ISOLATED FORM
0x00fc: 0xfed9, # ARABIC LETTER KAF ISOLATED FORM
0x00fd: 0xfef1, # ARABIC LETTER YEH ISOLATED FORM
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: None, # UNDEFINED
})
### Decoding Table
decoding_table = (
'\x00' # 0x0000 -> NULL
'\x01' # 0x0001 -> START OF HEADING
'\x02' # 0x0002 -> START OF TEXT
'\x03' # 0x0003 -> END OF TEXT
'\x04' # 0x0004 -> END OF TRANSMISSION
'\x05' # 0x0005 -> ENQUIRY
'\x06' # 0x0006 -> ACKNOWLEDGE
'\x07' # 0x0007 -> BELL
'\x08' # 0x0008 -> BACKSPACE
'\t' # 0x0009 -> HORIZONTAL TABULATION
'\n' # 0x000a -> LINE FEED
'\x0b' # 0x000b -> VERTICAL TABULATION
'\x0c' # 0x000c -> FORM FEED
'\r' # 0x000d -> CARRIAGE RETURN
'\x0e' # 0x000e -> SHIFT OUT
'\x0f' # 0x000f -> SHIFT IN
'\x10' # 0x0010 -> DATA LINK ESCAPE
'\x11' # 0x0011 -> DEVICE CONTROL ONE
'\x12' # 0x0012 -> DEVICE CONTROL TWO
'\x13' # 0x0013 -> DEVICE CONTROL THREE
'\x14' # 0x0014 -> DEVICE CONTROL FOUR
'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x0016 -> SYNCHRONOUS IDLE
'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
'\x18' # 0x0018 -> CANCEL
'\x19' # 0x0019 -> END OF MEDIUM
'\x1a' # 0x001a -> SUBSTITUTE
'\x1b' # 0x001b -> ESCAPE
'\x1c' # 0x001c -> FILE SEPARATOR
'\x1d' # 0x001d -> GROUP SEPARATOR
'\x1e' # 0x001e -> RECORD SEPARATOR
'\x1f' # 0x001f -> UNIT SEPARATOR
' ' # 0x0020 -> SPACE
'!' # 0x0021 -> EXCLAMATION MARK
'"' # 0x0022 -> QUOTATION MARK
'#' # 0x0023 -> NUMBER SIGN
'$' # 0x0024 -> DOLLAR SIGN
'\u066a' # 0x0025 -> ARABIC PERCENT SIGN
'&' # 0x0026 -> AMPERSAND
"'" # 0x0027 -> APOSTROPHE
'(' # 0x0028 -> LEFT PARENTHESIS
')' # 0x0029 -> RIGHT PARENTHESIS
'*' # 0x002a -> ASTERISK
'+' # 0x002b -> PLUS SIGN
',' # 0x002c -> COMMA
'-' # 0x002d -> HYPHEN-MINUS
'.' # 0x002e -> FULL STOP
'/' # 0x002f -> SOLIDUS
'0' # 0x0030 -> DIGIT ZERO
'1' # 0x0031 -> DIGIT ONE
'2' # 0x0032 -> DIGIT TWO
'3' # 0x0033 -> DIGIT THREE
'4' # 0x0034 -> DIGIT FOUR
'5' # 0x0035 -> DIGIT FIVE
'6' # 0x0036 -> DIGIT SIX
'7' # 0x0037 -> DIGIT SEVEN
'8' # 0x0038 -> DIGIT EIGHT
'9' # 0x0039 -> DIGIT NINE
':' # 0x003a -> COLON
';' # 0x003b -> SEMICOLON
'<' # 0x003c -> LESS-THAN SIGN
'=' # 0x003d -> EQUALS SIGN
'>' # 0x003e -> GREATER-THAN SIGN
'?' # 0x003f -> QUESTION MARK
'@' # 0x0040 -> COMMERCIAL AT
'A' # 0x0041 -> LATIN CAPITAL LETTER A
'B' # 0x0042 -> LATIN CAPITAL LETTER B
'C' # 0x0043 -> LATIN CAPITAL LETTER C
'D' # 0x0044 -> LATIN CAPITAL LETTER D
'E' # 0x0045 -> LATIN CAPITAL LETTER E
'F' # 0x0046 -> LATIN CAPITAL LETTER F
'G' # 0x0047 -> LATIN CAPITAL LETTER G
'H' # 0x0048 -> LATIN CAPITAL LETTER H
'I' # 0x0049 -> LATIN CAPITAL LETTER I
'J' # 0x004a -> LATIN CAPITAL LETTER J
'K' # 0x004b -> LATIN CAPITAL LETTER K
'L' # 0x004c -> LATIN CAPITAL LETTER L
'M' # 0x004d -> LATIN CAPITAL LETTER M
'N' # 0x004e -> LATIN CAPITAL LETTER N
'O' # 0x004f -> LATIN CAPITAL LETTER O
'P' # 0x0050 -> LATIN CAPITAL LETTER P
'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
'R' # 0x0052 -> LATIN CAPITAL LETTER R
'S' # 0x0053 -> LATIN CAPITAL LETTER S
'T' # 0x0054 -> LATIN CAPITAL LETTER T
'U' # 0x0055 -> LATIN CAPITAL LETTER U
'V' # 0x0056 -> LATIN CAPITAL LETTER V
'W' # 0x0057 -> LATIN CAPITAL LETTER W
'X' # 0x0058 -> LATIN CAPITAL LETTER X
'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
'Z' # 0x005a -> LATIN CAPITAL LETTER Z
'[' # 0x005b -> LEFT SQUARE BRACKET
'\\' # 0x005c -> REVERSE SOLIDUS
']' # 0x005d -> RIGHT SQUARE BRACKET
'^' # 0x005e -> CIRCUMFLEX ACCENT
'_' # 0x005f -> LOW LINE
'`' # 0x0060 -> GRAVE ACCENT
'a' # 0x0061 -> LATIN SMALL LETTER A
'b' # 0x0062 -> LATIN SMALL LETTER B
'c' # 0x0063 -> LATIN SMALL LETTER C
'd' # 0x0064 -> LATIN SMALL LETTER D
'e' # 0x0065 -> LATIN SMALL LETTER E
'f' # 0x0066 -> LATIN SMALL LETTER F
'g' # 0x0067 -> LATIN SMALL LETTER G
'h' # 0x0068 -> LATIN SMALL LETTER H
'i' # 0x0069 -> LATIN SMALL LETTER I
'j' # 0x006a -> LATIN SMALL LETTER J
'k' # 0x006b -> LATIN SMALL LETTER K
'l' # 0x006c -> LATIN SMALL LETTER L
'm' # 0x006d -> LATIN SMALL LETTER M
'n' # 0x006e -> LATIN SMALL LETTER N
'o' # 0x006f -> LATIN SMALL LETTER O
'p' # 0x0070 -> LATIN SMALL LETTER P
'q' # 0x0071 -> LATIN SMALL LETTER Q
'r' # 0x0072 -> LATIN SMALL LETTER R
's' # 0x0073 -> LATIN SMALL LETTER S
't' # 0x0074 -> LATIN SMALL LETTER T
'u' # 0x0075 -> LATIN SMALL LETTER U
'v' # 0x0076 -> LATIN SMALL LETTER V
'w' # 0x0077 -> LATIN SMALL LETTER W
'x' # 0x0078 -> LATIN SMALL LETTER X
'y' # 0x0079 -> LATIN SMALL LETTER Y
'z' # 0x007a -> LATIN SMALL LETTER Z
'{' # 0x007b -> LEFT CURLY BRACKET
'|' # 0x007c -> VERTICAL LINE
'}' # 0x007d -> RIGHT CURLY BRACKET
'~' # 0x007e -> TILDE
'\x7f' # 0x007f -> DELETE
'\xb0' # 0x0080 -> DEGREE SIGN
'\xb7' # 0x0081 -> MIDDLE DOT
'\u2219' # 0x0082 -> BULLET OPERATOR
'\u221a' # 0x0083 -> SQUARE ROOT
'\u2592' # 0x0084 -> MEDIUM SHADE
'\u2500' # 0x0085 -> FORMS LIGHT HORIZONTAL
'\u2502' # 0x0086 -> FORMS LIGHT VERTICAL
'\u253c' # 0x0087 -> FORMS LIGHT VERTICAL AND HORIZONTAL
'\u2524' # 0x0088 -> FORMS LIGHT VERTICAL AND LEFT
'\u252c' # 0x0089 -> FORMS LIGHT DOWN AND HORIZONTAL
'\u251c' # 0x008a -> FORMS LIGHT VERTICAL AND RIGHT
'\u2534' # 0x008b -> FORMS LIGHT UP AND HORIZONTAL
'\u2510' # 0x008c -> FORMS LIGHT DOWN AND LEFT
'\u250c' # 0x008d -> FORMS LIGHT DOWN AND RIGHT
'\u2514' # 0x008e -> FORMS LIGHT UP AND RIGHT
'\u2518' # 0x008f -> FORMS LIGHT UP AND LEFT
'\u03b2' # 0x0090 -> GREEK SMALL BETA
'\u221e' # 0x0091 -> INFINITY
'\u03c6' # 0x0092 -> GREEK SMALL PHI
'\xb1' # 0x0093 -> PLUS-OR-MINUS SIGN
'\xbd' # 0x0094 -> FRACTION 1/2
'\xbc' # 0x0095 -> FRACTION 1/4
'\u2248' # 0x0096 -> ALMOST EQUAL TO
'\xab' # 0x0097 -> LEFT POINTING GUILLEMET
'\xbb' # 0x0098 -> RIGHT POINTING GUILLEMET
'\ufef7' # 0x0099 -> ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE ISOLATED FORM
'\ufef8' # 0x009a -> ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE FINAL FORM
'\ufffe' # 0x009b -> UNDEFINED
'\ufffe' # 0x009c -> UNDEFINED
'\ufefb' # 0x009d -> ARABIC LIGATURE LAM WITH ALEF ISOLATED FORM
'\ufefc' # 0x009e -> ARABIC LIGATURE LAM WITH ALEF FINAL FORM
'\ufffe' # 0x009f -> UNDEFINED
'\xa0' # 0x00a0 -> NON-BREAKING SPACE
'\xad' # 0x00a1 -> SOFT HYPHEN
'\ufe82' # 0x00a2 -> ARABIC LETTER ALEF WITH MADDA ABOVE FINAL FORM
'\xa3' # 0x00a3 -> POUND SIGN
'\xa4' # 0x00a4 -> CURRENCY SIGN
'\ufe84' # 0x00a5 -> ARABIC LETTER ALEF WITH HAMZA ABOVE FINAL FORM
'\ufffe' # 0x00a6 -> UNDEFINED
'\ufffe' # 0x00a7 -> UNDEFINED
'\ufe8e' # 0x00a8 -> ARABIC LETTER ALEF FINAL FORM
'\ufe8f' # 0x00a9 -> ARABIC LETTER BEH ISOLATED FORM
'\ufe95' # 0x00aa -> ARABIC LETTER TEH ISOLATED FORM
'\ufe99' # 0x00ab -> ARABIC LETTER THEH ISOLATED FORM
'\u060c' # 0x00ac -> ARABIC COMMA
'\ufe9d' # 0x00ad -> ARABIC LETTER JEEM ISOLATED FORM
'\ufea1' # 0x00ae -> ARABIC LETTER HAH ISOLATED FORM
'\ufea5' # 0x00af -> ARABIC LETTER KHAH ISOLATED FORM
'\u0660' # 0x00b0 -> ARABIC-INDIC DIGIT ZERO
'\u0661' # 0x00b1 -> ARABIC-INDIC DIGIT ONE
'\u0662' # 0x00b2 -> ARABIC-INDIC DIGIT TWO
'\u0663' # 0x00b3 -> ARABIC-INDIC DIGIT THREE
'\u0664' # 0x00b4 -> ARABIC-INDIC DIGIT FOUR
'\u0665' # 0x00b5 -> ARABIC-INDIC DIGIT FIVE
'\u0666' # 0x00b6 -> ARABIC-INDIC DIGIT SIX
'\u0667' # 0x00b7 -> ARABIC-INDIC DIGIT SEVEN
'\u0668' # 0x00b8 -> ARABIC-INDIC DIGIT EIGHT
'\u0669' # 0x00b9 -> ARABIC-INDIC DIGIT NINE
'\ufed1' # 0x00ba -> ARABIC LETTER FEH ISOLATED FORM
'\u061b' # 0x00bb -> ARABIC SEMICOLON
'\ufeb1' # 0x00bc -> ARABIC LETTER SEEN ISOLATED FORM
'\ufeb5' # 0x00bd -> ARABIC LETTER SHEEN ISOLATED FORM
'\ufeb9' # 0x00be -> ARABIC LETTER SAD ISOLATED FORM
'\u061f' # 0x00bf -> ARABIC QUESTION MARK
'\xa2' # 0x00c0 -> CENT SIGN
'\ufe80' # 0x00c1 -> ARABIC LETTER HAMZA ISOLATED FORM
'\ufe81' # 0x00c2 -> ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM
'\ufe83' # 0x00c3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE ISOLATED FORM
'\ufe85' # 0x00c4 -> ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM
'\ufeca' # 0x00c5 -> ARABIC LETTER AIN FINAL FORM
'\ufe8b' # 0x00c6 -> ARABIC LETTER YEH WITH HAMZA ABOVE INITIAL FORM
'\ufe8d' # 0x00c7 -> ARABIC LETTER ALEF ISOLATED FORM
'\ufe91' # 0x00c8 -> ARABIC LETTER BEH INITIAL FORM
'\ufe93' # 0x00c9 -> ARABIC LETTER TEH MARBUTA ISOLATED FORM
'\ufe97' # 0x00ca -> ARABIC LETTER TEH INITIAL FORM
'\ufe9b' # 0x00cb -> ARABIC LETTER THEH INITIAL FORM
'\ufe9f' # 0x00cc -> ARABIC LETTER JEEM INITIAL FORM
'\ufea3' # 0x00cd -> ARABIC LETTER HAH INITIAL FORM
'\ufea7' # 0x00ce -> ARABIC LETTER KHAH INITIAL FORM
'\ufea9' # 0x00cf -> ARABIC LETTER DAL ISOLATED FORM
'\ufeab' # 0x00d0 -> ARABIC LETTER THAL ISOLATED FORM
'\ufead' # 0x00d1 -> ARABIC LETTER REH ISOLATED FORM
'\ufeaf' # 0x00d2 -> ARABIC LETTER ZAIN ISOLATED FORM
'\ufeb3' # 0x00d3 -> ARABIC LETTER SEEN INITIAL FORM
'\ufeb7' # 0x00d4 -> ARABIC LETTER SHEEN INITIAL FORM
'\ufebb' # 0x00d5 -> ARABIC LETTER SAD INITIAL FORM
'\ufebf' # 0x00d6 -> ARABIC LETTER DAD INITIAL FORM
'\ufec1' # 0x00d7 -> ARABIC LETTER TAH ISOLATED FORM
'\ufec5' # 0x00d8 -> ARABIC LETTER ZAH ISOLATED FORM
'\ufecb' # 0x00d9 -> ARABIC LETTER AIN INITIAL FORM
'\ufecf' # 0x00da -> ARABIC LETTER GHAIN INITIAL FORM
'\xa6' # 0x00db -> BROKEN VERTICAL BAR
'\xac' # 0x00dc -> NOT SIGN
'\xf7' # 0x00dd -> DIVISION SIGN
'\xd7' # 0x00de -> MULTIPLICATION SIGN
'\ufec9' # 0x00df -> ARABIC LETTER AIN ISOLATED FORM
'\u0640' # 0x00e0 -> ARABIC TATWEEL
'\ufed3' # 0x00e1 -> ARABIC LETTER FEH INITIAL FORM
'\ufed7' # 0x00e2 -> ARABIC LETTER QAF INITIAL FORM
'\ufedb' # 0x00e3 -> ARABIC LETTER KAF INITIAL FORM
'\ufedf' # 0x00e4 -> ARABIC LETTER LAM INITIAL FORM
'\ufee3' # 0x00e5 -> ARABIC LETTER MEEM INITIAL FORM
'\ufee7' # 0x00e6 -> ARABIC LETTER NOON INITIAL FORM
'\ufeeb' # 0x00e7 -> ARABIC LETTER HEH INITIAL FORM
'\ufeed' # 0x00e8 -> ARABIC LETTER WAW ISOLATED FORM
'\ufeef' # 0x00e9 -> ARABIC LETTER ALEF MAKSURA ISOLATED FORM
'\ufef3' # 0x00ea -> ARABIC LETTER YEH INITIAL FORM
'\ufebd' # 0x00eb -> ARABIC LETTER DAD ISOLATED FORM
'\ufecc' # 0x00ec -> ARABIC LETTER AIN MEDIAL FORM
'\ufece' # 0x00ed -> ARABIC LETTER GHAIN FINAL FORM
'\ufecd' # 0x00ee -> ARABIC LETTER GHAIN ISOLATED FORM
'\ufee1' # 0x00ef -> ARABIC LETTER MEEM ISOLATED FORM
'\ufe7d' # 0x00f0 -> ARABIC SHADDA MEDIAL FORM
'\u0651' # 0x00f1 -> ARABIC SHADDAH
'\ufee5' # 0x00f2 -> ARABIC LETTER NOON ISOLATED FORM
'\ufee9' # 0x00f3 -> ARABIC LETTER HEH ISOLATED FORM
'\ufeec' # 0x00f4 -> ARABIC LETTER HEH MEDIAL FORM
'\ufef0' # 0x00f5 -> ARABIC LETTER ALEF MAKSURA FINAL FORM
'\ufef2' # 0x00f6 -> ARABIC LETTER YEH FINAL FORM
'\ufed0' # 0x00f7 -> ARABIC LETTER GHAIN MEDIAL FORM
'\ufed5' # 0x00f8 -> ARABIC LETTER QAF ISOLATED FORM
'\ufef5' # 0x00f9 -> ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE ISOLATED FORM
'\ufef6' # 0x00fa -> ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE FINAL FORM
'\ufedd' # 0x00fb -> ARABIC LETTER LAM ISOLATED FORM
'\ufed9' # 0x00fc -> ARABIC LETTER KAF ISOLATED FORM
'\ufef1' # 0x00fd -> ARABIC LETTER YEH ISOLATED FORM
'\u25a0' # 0x00fe -> BLACK SQUARE
'\ufffe' # 0x00ff -> UNDEFINED
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00a0, # NON-BREAKING SPACE
0x00a2: 0x00c0, # CENT SIGN
0x00a3: 0x00a3, # POUND SIGN
0x00a4: 0x00a4, # CURRENCY SIGN
0x00a6: 0x00db, # BROKEN VERTICAL BAR
0x00ab: 0x0097, # LEFT POINTING GUILLEMET
0x00ac: 0x00dc, # NOT SIGN
0x00ad: 0x00a1, # SOFT HYPHEN
0x00b0: 0x0080, # DEGREE SIGN
0x00b1: 0x0093, # PLUS-OR-MINUS SIGN
0x00b7: 0x0081, # MIDDLE DOT
0x00bb: 0x0098, # RIGHT POINTING GUILLEMET
0x00bc: 0x0095, # FRACTION 1/4
0x00bd: 0x0094, # FRACTION 1/2
0x00d7: 0x00de, # MULTIPLICATION SIGN
0x00f7: 0x00dd, # DIVISION SIGN
0x03b2: 0x0090, # GREEK SMALL BETA
0x03c6: 0x0092, # GREEK SMALL PHI
0x060c: 0x00ac, # ARABIC COMMA
0x061b: 0x00bb, # ARABIC SEMICOLON
0x061f: 0x00bf, # ARABIC QUESTION MARK
0x0640: 0x00e0, # ARABIC TATWEEL
0x0651: 0x00f1, # ARABIC SHADDAH
0x0660: 0x00b0, # ARABIC-INDIC DIGIT ZERO
0x0661: 0x00b1, # ARABIC-INDIC DIGIT ONE
0x0662: 0x00b2, # ARABIC-INDIC DIGIT TWO
0x0663: 0x00b3, # ARABIC-INDIC DIGIT THREE
0x0664: 0x00b4, # ARABIC-INDIC DIGIT FOUR
0x0665: 0x00b5, # ARABIC-INDIC DIGIT FIVE
0x0666: 0x00b6, # ARABIC-INDIC DIGIT SIX
0x0667: 0x00b7, # ARABIC-INDIC DIGIT SEVEN
0x0668: 0x00b8, # ARABIC-INDIC DIGIT EIGHT
0x0669: 0x00b9, # ARABIC-INDIC DIGIT NINE
0x066a: 0x0025, # ARABIC PERCENT SIGN
0x2219: 0x0082, # BULLET OPERATOR
0x221a: 0x0083, # SQUARE ROOT
0x221e: 0x0091, # INFINITY
0x2248: 0x0096, # ALMOST EQUAL TO
0x2500: 0x0085, # FORMS LIGHT HORIZONTAL
0x2502: 0x0086, # FORMS LIGHT VERTICAL
0x250c: 0x008d, # FORMS LIGHT DOWN AND RIGHT
0x2510: 0x008c, # FORMS LIGHT DOWN AND LEFT
0x2514: 0x008e, # FORMS LIGHT UP AND RIGHT
0x2518: 0x008f, # FORMS LIGHT UP AND LEFT
0x251c: 0x008a, # FORMS LIGHT VERTICAL AND RIGHT
0x2524: 0x0088, # FORMS LIGHT VERTICAL AND LEFT
0x252c: 0x0089, # FORMS LIGHT DOWN AND HORIZONTAL
0x2534: 0x008b, # FORMS LIGHT UP AND HORIZONTAL
0x253c: 0x0087, # FORMS LIGHT VERTICAL AND HORIZONTAL
0x2592: 0x0084, # MEDIUM SHADE
0x25a0: 0x00fe, # BLACK SQUARE
0xfe7d: 0x00f0, # ARABIC SHADDA MEDIAL FORM
0xfe80: 0x00c1, # ARABIC LETTER HAMZA ISOLATED FORM
0xfe81: 0x00c2, # ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM
0xfe82: 0x00a2, # ARABIC LETTER ALEF WITH MADDA ABOVE FINAL FORM
0xfe83: 0x00c3, # ARABIC LETTER ALEF WITH HAMZA ABOVE ISOLATED FORM
0xfe84: 0x00a5, # ARABIC LETTER ALEF WITH HAMZA ABOVE FINAL FORM
0xfe85: 0x00c4, # ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM
0xfe8b: 0x00c6, # ARABIC LETTER YEH WITH HAMZA ABOVE INITIAL FORM
0xfe8d: 0x00c7, # ARABIC LETTER ALEF ISOLATED FORM
0xfe8e: 0x00a8, # ARABIC LETTER ALEF FINAL FORM
0xfe8f: 0x00a9, # ARABIC LETTER BEH ISOLATED FORM
0xfe91: 0x00c8, # ARABIC LETTER BEH INITIAL FORM
0xfe93: 0x00c9, # ARABIC LETTER TEH MARBUTA ISOLATED FORM
0xfe95: 0x00aa, # ARABIC LETTER TEH ISOLATED FORM
0xfe97: 0x00ca, # ARABIC LETTER TEH INITIAL FORM
0xfe99: 0x00ab, # ARABIC LETTER THEH ISOLATED FORM
0xfe9b: 0x00cb, # ARABIC LETTER THEH INITIAL FORM
0xfe9d: 0x00ad, # ARABIC LETTER JEEM ISOLATED FORM
0xfe9f: 0x00cc, # ARABIC LETTER JEEM INITIAL FORM
0xfea1: 0x00ae, # ARABIC LETTER HAH ISOLATED FORM
0xfea3: 0x00cd, # ARABIC LETTER HAH INITIAL FORM
0xfea5: 0x00af, # ARABIC LETTER KHAH ISOLATED FORM
0xfea7: 0x00ce, # ARABIC LETTER KHAH INITIAL FORM
0xfea9: 0x00cf, # ARABIC LETTER DAL ISOLATED FORM
0xfeab: 0x00d0, # ARABIC LETTER THAL ISOLATED FORM
0xfead: 0x00d1, # ARABIC LETTER REH ISOLATED FORM
0xfeaf: 0x00d2, # ARABIC LETTER ZAIN ISOLATED FORM
0xfeb1: 0x00bc, # ARABIC LETTER SEEN ISOLATED FORM
0xfeb3: 0x00d3, # ARABIC LETTER SEEN INITIAL FORM
0xfeb5: 0x00bd, # ARABIC LETTER SHEEN ISOLATED FORM
0xfeb7: 0x00d4, # ARABIC LETTER SHEEN INITIAL FORM
0xfeb9: 0x00be, # ARABIC LETTER SAD ISOLATED FORM
0xfebb: 0x00d5, # ARABIC LETTER SAD INITIAL FORM
0xfebd: 0x00eb, # ARABIC LETTER DAD ISOLATED FORM
0xfebf: 0x00d6, # ARABIC LETTER DAD INITIAL FORM
0xfec1: 0x00d7, # ARABIC LETTER TAH ISOLATED FORM
0xfec5: 0x00d8, # ARABIC LETTER ZAH ISOLATED FORM
0xfec9: 0x00df, # ARABIC LETTER AIN ISOLATED FORM
0xfeca: 0x00c5, # ARABIC LETTER AIN FINAL FORM
0xfecb: 0x00d9, # ARABIC LETTER AIN INITIAL FORM
0xfecc: 0x00ec, # ARABIC LETTER AIN MEDIAL FORM
0xfecd: 0x00ee, # ARABIC LETTER GHAIN ISOLATED FORM
0xfece: 0x00ed, # ARABIC LETTER GHAIN FINAL FORM
0xfecf: 0x00da, # ARABIC LETTER GHAIN INITIAL FORM
0xfed0: 0x00f7, # ARABIC LETTER GHAIN MEDIAL FORM
0xfed1: 0x00ba, # ARABIC LETTER FEH ISOLATED FORM
0xfed3: 0x00e1, # ARABIC LETTER FEH INITIAL FORM
0xfed5: 0x00f8, # ARABIC LETTER QAF ISOLATED FORM
0xfed7: 0x00e2, # ARABIC LETTER QAF INITIAL FORM
0xfed9: 0x00fc, # ARABIC LETTER KAF ISOLATED FORM
0xfedb: 0x00e3, # ARABIC LETTER KAF INITIAL FORM
0xfedd: 0x00fb, # ARABIC LETTER LAM ISOLATED FORM
0xfedf: 0x00e4, # ARABIC LETTER LAM INITIAL FORM
0xfee1: 0x00ef, # ARABIC LETTER MEEM ISOLATED FORM
0xfee3: 0x00e5, # ARABIC LETTER MEEM INITIAL FORM
0xfee5: 0x00f2, # ARABIC LETTER NOON ISOLATED FORM
0xfee7: 0x00e6, # ARABIC LETTER NOON INITIAL FORM
0xfee9: 0x00f3, # ARABIC LETTER HEH ISOLATED FORM
0xfeeb: 0x00e7, # ARABIC LETTER HEH INITIAL FORM
0xfeec: 0x00f4, # ARABIC LETTER HEH MEDIAL FORM
0xfeed: 0x00e8, # ARABIC LETTER WAW ISOLATED FORM
0xfeef: 0x00e9, # ARABIC LETTER ALEF MAKSURA ISOLATED FORM
0xfef0: 0x00f5, # ARABIC LETTER ALEF MAKSURA FINAL FORM
0xfef1: 0x00fd, # ARABIC LETTER YEH ISOLATED FORM
0xfef2: 0x00f6, # ARABIC LETTER YEH FINAL FORM
0xfef3: 0x00ea, # ARABIC LETTER YEH INITIAL FORM
0xfef5: 0x00f9, # ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE ISOLATED FORM
0xfef6: 0x00fa, # ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE FINAL FORM
0xfef7: 0x0099, # ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE ISOLATED FORM
0xfef8: 0x009a, # ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE FINAL FORM
0xfefb: 0x009d, # ARABIC LIGATURE LAM WITH ALEF ISOLATED FORM
0xfefc: 0x009e, # ARABIC LIGATURE LAM WITH ALEF FINAL FORM
}
| lgpl-3.0 | -5,030,003,695,984,570,000 | 47.786957 | 97 | 0.604076 | false |
vrv/tensorflow | tensorflow/contrib/tensor_forest/hybrid/python/kernel_tests/k_feature_routing_function_op_test.py | 103 | 3383 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the routing function op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tensor_forest.hybrid.ops import gen_training_ops
from tensorflow.contrib.tensor_forest.hybrid.python.ops import training_ops
from tensorflow.contrib.tensor_forest.python import tensor_forest
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class KFeatureRoutingFunctionTest(test_util.TensorFlowTestCase):
def setUp(self):
self.input_data = [[-1., 0.], [-1., 2.],
[1., 0.], [1., -2.]]
self.input_labels = [0., 1., 2., 3.]
self.tree = [[1, 0], [-1, 0], [-1, 0]]
self.tree_weights = [[1.0, 0.0], [1.0, 0.0], [1.0, 0.0]]
self.tree_thresholds = [0., 0., 0.]
self.ops = training_ops.Load()
self.params = tensor_forest.ForestHParams(
num_features=2,
hybrid_tree_depth=2,
base_random_seed=10,
feature_bagging_fraction=1.0,
regularization_strength=0.01,
regularization="",
weight_init_mean=0.0,
weight_init_std=0.1)
self.params.num_nodes = 2**self.params.hybrid_tree_depth - 1
self.params.num_leaves = 2**(self.params.hybrid_tree_depth - 1)
self.params.num_features_per_node = (
self.params.feature_bagging_fraction * self.params.num_features)
self.params.regression = False
def testParams(self):
self.assertEquals(self.params.num_nodes, 3)
self.assertEquals(self.params.num_features, 2)
self.assertEquals(self.params.num_features_per_node, 2)
def testRoutingFunction(self):
with self.test_session():
route_tensor = gen_training_ops.k_feature_routing_function(
self.input_data,
self.tree_weights,
self.tree_thresholds,
max_nodes=self.params.num_nodes,
num_features_per_node=self.params.num_features_per_node,
layer_num=0,
random_seed=self.params.base_random_seed)
route_tensor_shape = route_tensor.get_shape()
self.assertEquals(len(route_tensor_shape), 2)
self.assertEquals(route_tensor_shape[0], 4)
self.assertEquals(route_tensor_shape[1], 3)
routes = route_tensor.eval()
print(routes)
# Point 1
# Node 1 is a decision node => probability = 1.0
self.assertAlmostEquals(1.0, routes[0, 0])
# Probability left output = 1.0 / (1.0 + exp(1.0)) = 0.26894142
self.assertAlmostEquals(0.26894142, routes[0, 1])
# Probability right = 1 - 0.2689414 = 0.73105858
self.assertAlmostEquals(0.73105858, routes[0, 2])
if __name__ == "__main__":
googletest.main()
| apache-2.0 | -6,217,180,267,416,890,000 | 37.011236 | 80 | 0.655631 | false |
Trixter69/BookMarka | lib/cherrypy/_cptools.py | 55 | 19093 | """CherryPy tools. A "tool" is any helper, adapted to CP.
Tools are usually designed to be used in a variety of ways (although some
may only offer one if they choose):
Library calls
All tools are callables that can be used wherever needed.
The arguments are straightforward and should be detailed within the
docstring.
Function decorators
All tools, when called, may be used as decorators which configure
individual CherryPy page handlers (methods on the CherryPy tree).
That is, "@tools.anytool()" should "turn on" the tool via the
decorated function's _cp_config attribute.
CherryPy config
If a tool exposes a "_setup" callable, it will be called
once per Request (if the feature is "turned on" via config).
Tools may be implemented as any object with a namespace. The builtins
are generally either modules or instances of the tools.Tool class.
"""
import sys
import warnings
import cherrypy
def _getargs(func):
"""Return the names of all static arguments to the given function."""
# Use this instead of importing inspect for less mem overhead.
import types
if sys.version_info >= (3, 0):
if isinstance(func, types.MethodType):
func = func.__func__
co = func.__code__
else:
if isinstance(func, types.MethodType):
func = func.im_func
co = func.func_code
return co.co_varnames[:co.co_argcount]
_attr_error = (
"CherryPy Tools cannot be turned on directly. Instead, turn them "
"on via config, or use them as decorators on your page handlers."
)
class Tool(object):
"""A registered function for use with CherryPy request-processing hooks.
help(tool.callable) should give you more information about this Tool.
"""
namespace = "tools"
def __init__(self, point, callable, name=None, priority=50):
self._point = point
self.callable = callable
self._name = name
self._priority = priority
self.__doc__ = self.callable.__doc__
self._setargs()
def _get_on(self):
raise AttributeError(_attr_error)
def _set_on(self, value):
raise AttributeError(_attr_error)
on = property(_get_on, _set_on)
def _setargs(self):
"""Copy func parameter names to obj attributes."""
try:
for arg in _getargs(self.callable):
setattr(self, arg, None)
except (TypeError, AttributeError):
if hasattr(self.callable, "__call__"):
for arg in _getargs(self.callable.__call__):
setattr(self, arg, None)
# IronPython 1.0 raises NotImplementedError because
# inspect.getargspec tries to access Python bytecode
# in co_code attribute.
except NotImplementedError:
pass
# IronPython 1B1 may raise IndexError in some cases,
# but if we trap it here it doesn't prevent CP from working.
except IndexError:
pass
def _merged_args(self, d=None):
"""Return a dict of configuration entries for this Tool."""
if d:
conf = d.copy()
else:
conf = {}
tm = cherrypy.serving.request.toolmaps[self.namespace]
if self._name in tm:
conf.update(tm[self._name])
if "on" in conf:
del conf["on"]
return conf
def __call__(self, *args, **kwargs):
"""Compile-time decorator (turn on the tool in config).
For example::
@tools.proxy()
def whats_my_base(self):
return cherrypy.request.base
whats_my_base.exposed = True
"""
if args:
raise TypeError("The %r Tool does not accept positional "
"arguments; you must use keyword arguments."
% self._name)
def tool_decorator(f):
if not hasattr(f, "_cp_config"):
f._cp_config = {}
subspace = self.namespace + "." + self._name + "."
f._cp_config[subspace + "on"] = True
for k, v in kwargs.items():
f._cp_config[subspace + k] = v
return f
return tool_decorator
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop("priority", None)
if p is None:
p = getattr(self.callable, "priority", self._priority)
cherrypy.serving.request.hooks.attach(self._point, self.callable,
priority=p, **conf)
class HandlerTool(Tool):
"""Tool which is called 'before main', that may skip normal handlers.
If the tool successfully handles the request (by setting response.body),
    it should return True. This will cause CherryPy to skip any 'normal' page
handler. If the tool did not handle the request, it should return False
to tell CherryPy to continue on and call the normal page handler. If the
tool is declared AS a page handler (see the 'handler' method), returning
False will raise NotFound.
"""
def __init__(self, callable, name=None):
Tool.__init__(self, 'before_handler', callable, name)
def handler(self, *args, **kwargs):
"""Use this tool as a CherryPy page handler.
For example::
class Root:
nav = tools.staticdir.handler(section="/nav", dir="nav",
root=absDir)
"""
def handle_func(*a, **kw):
handled = self.callable(*args, **self._merged_args(kwargs))
if not handled:
raise cherrypy.NotFound()
return cherrypy.serving.response.body
handle_func.exposed = True
return handle_func
def _wrapper(self, **kwargs):
if self.callable(**kwargs):
cherrypy.serving.request.handler = None
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop("priority", None)
if p is None:
p = getattr(self.callable, "priority", self._priority)
cherrypy.serving.request.hooks.attach(self._point, self._wrapper,
priority=p, **conf)
class HandlerWrapperTool(Tool):
"""Tool which wraps request.handler in a provided wrapper function.
The 'newhandler' arg must be a handler wrapper function that takes a
'next_handler' argument, plus ``*args`` and ``**kwargs``. Like all
page handler
functions, it must return an iterable for use as cherrypy.response.body.
For example, to allow your 'inner' page handlers to return dicts
which then get interpolated into a template::
def interpolator(next_handler, *args, **kwargs):
filename = cherrypy.request.config.get('template')
cherrypy.response.template = env.get_template(filename)
response_dict = next_handler(*args, **kwargs)
return cherrypy.response.template.render(**response_dict)
cherrypy.tools.jinja = HandlerWrapperTool(interpolator)
"""
def __init__(self, newhandler, point='before_handler', name=None,
priority=50):
self.newhandler = newhandler
self._point = point
self._name = name
self._priority = priority
def callable(self, *args, **kwargs):
innerfunc = cherrypy.serving.request.handler
def wrap(*args, **kwargs):
return self.newhandler(innerfunc, *args, **kwargs)
cherrypy.serving.request.handler = wrap
class ErrorTool(Tool):
"""Tool which is used to replace the default request.error_response."""
def __init__(self, callable, name=None):
Tool.__init__(self, None, callable, name)
def _wrapper(self):
self.callable(**self._merged_args())
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
cherrypy.serving.request.error_response = self._wrapper
# Builtin tools #
from cherrypy.lib import cptools, encoding, auth, static, jsontools
from cherrypy.lib import sessions as _sessions, xmlrpcutil as _xmlrpc
from cherrypy.lib import caching as _caching
from cherrypy.lib import auth_basic, auth_digest
class SessionTool(Tool):
"""Session Tool for CherryPy.
sessions.locking
When 'implicit' (the default), the session will be locked for you,
just before running the page handler.
When 'early', the session will be locked before reading the request
body. This is off by default for safety reasons; for example,
a large upload would block the session, denying an AJAX
progress meter
(`issue <https://bitbucket.org/cherrypy/cherrypy/issue/630>`_).
When 'explicit' (or any other value), you need to call
cherrypy.session.acquire_lock() yourself before using
session data.
"""
def __init__(self):
# _sessions.init must be bound after headers are read
Tool.__init__(self, 'before_request_body', _sessions.init)
def _lock_session(self):
cherrypy.serving.session.acquire_lock()
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
hooks = cherrypy.serving.request.hooks
conf = self._merged_args()
p = conf.pop("priority", None)
if p is None:
p = getattr(self.callable, "priority", self._priority)
hooks.attach(self._point, self.callable, priority=p, **conf)
locking = conf.pop('locking', 'implicit')
if locking == 'implicit':
hooks.attach('before_handler', self._lock_session)
elif locking == 'early':
# Lock before the request body (but after _sessions.init runs!)
hooks.attach('before_request_body', self._lock_session,
priority=60)
else:
# Don't lock
pass
hooks.attach('before_finalize', _sessions.save)
hooks.attach('on_end_request', _sessions.close)
def regenerate(self):
"""Drop the current session and make a new one (with a new id)."""
sess = cherrypy.serving.session
sess.regenerate()
# Grab cookie-relevant tool args
conf = dict([(k, v) for k, v in self._merged_args().items()
if k in ('path', 'path_header', 'name', 'timeout',
'domain', 'secure')])
_sessions.set_response_cookie(**conf)
class XMLRPCController(object):
"""A Controller (page handler collection) for XML-RPC.
To use it, have your controllers subclass this base class (it will
turn on the tool for you).
You can also supply the following optional config entries::
tools.xmlrpc.encoding: 'utf-8'
tools.xmlrpc.allow_none: 0
XML-RPC is a rather discontinuous layer over HTTP; dispatching to the
appropriate handler must first be performed according to the URL, and
then a second dispatch step must take place according to the RPC method
specified in the request body. It also allows a superfluous "/RPC2"
prefix in the URL, supplies its own handler args in the body, and
requires a 200 OK "Fault" response instead of 404 when the desired
method is not found.
Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone.
This Controller acts as the dispatch target for the first half (based
on the URL); it then reads the RPC method from the request body and
does its own second dispatch step based on that method. It also reads
body params, and returns a Fault on error.
The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2
    in your URLs, you can safely skip turning on the XMLRPCDispatcher.
    Otherwise, you need to declare it in config::
request.dispatch: cherrypy.dispatch.XMLRPCDispatcher()
"""
# Note we're hard-coding this into the 'tools' namespace. We could do
# a huge amount of work to make it relocatable, but the only reason why
# would be if someone actually disabled the default_toolbox. Meh.
_cp_config = {'tools.xmlrpc.on': True}
def default(self, *vpath, **params):
rpcparams, rpcmethod = _xmlrpc.process_body()
subhandler = self
for attr in str(rpcmethod).split('.'):
subhandler = getattr(subhandler, attr, None)
if subhandler and getattr(subhandler, "exposed", False):
body = subhandler(*(vpath + rpcparams), **params)
else:
# https://bitbucket.org/cherrypy/cherrypy/issue/533
# if a method is not found, an xmlrpclib.Fault should be returned
# raising an exception here will do that; see
# cherrypy.lib.xmlrpcutil.on_error
raise Exception('method "%s" is not supported' % attr)
conf = cherrypy.serving.request.toolmaps['tools'].get("xmlrpc", {})
_xmlrpc.respond(body,
conf.get('encoding', 'utf-8'),
conf.get('allow_none', 0))
return cherrypy.serving.response.body
default.exposed = True
class SessionAuthTool(HandlerTool):
def _setargs(self):
for name in dir(cptools.SessionAuth):
if not name.startswith("__"):
setattr(self, name, None)
class CachingTool(Tool):
"""Caching Tool for CherryPy."""
def _wrapper(self, **kwargs):
request = cherrypy.serving.request
if _caching.get(**kwargs):
request.handler = None
else:
if request.cacheable:
# Note the devious technique here of adding hooks on the fly
request.hooks.attach('before_finalize', _caching.tee_output,
priority=90)
_wrapper.priority = 20
def _setup(self):
"""Hook caching into cherrypy.request."""
conf = self._merged_args()
p = conf.pop("priority", None)
cherrypy.serving.request.hooks.attach('before_handler', self._wrapper,
priority=p, **conf)
class Toolbox(object):
"""A collection of Tools.
This object also functions as a config namespace handler for itself.
Custom toolboxes should be added to each Application's toolboxes dict.
"""
def __init__(self, namespace):
self.namespace = namespace
def __setattr__(self, name, value):
# If the Tool._name is None, supply it from the attribute name.
if isinstance(value, Tool):
if value._name is None:
value._name = name
value.namespace = self.namespace
object.__setattr__(self, name, value)
def __enter__(self):
"""Populate request.toolmaps from tools specified in config."""
cherrypy.serving.request.toolmaps[self.namespace] = map = {}
def populate(k, v):
toolname, arg = k.split(".", 1)
bucket = map.setdefault(toolname, {})
bucket[arg] = v
return populate
def __exit__(self, exc_type, exc_val, exc_tb):
"""Run tool._setup() for each tool in our toolmap."""
map = cherrypy.serving.request.toolmaps.get(self.namespace)
if map:
for name, settings in map.items():
if settings.get("on", False):
tool = getattr(self, name)
tool._setup()
class DeprecatedTool(Tool):
_name = None
warnmsg = "This Tool is deprecated."
def __init__(self, point, warnmsg=None):
self.point = point
if warnmsg is not None:
self.warnmsg = warnmsg
def __call__(self, *args, **kwargs):
warnings.warn(self.warnmsg)
def tool_decorator(f):
return f
return tool_decorator
def _setup(self):
warnings.warn(self.warnmsg)
default_toolbox = _d = Toolbox("tools")
_d.session_auth = SessionAuthTool(cptools.session_auth)
_d.allow = Tool('on_start_resource', cptools.allow)
_d.proxy = Tool('before_request_body', cptools.proxy, priority=30)
_d.response_headers = Tool('on_start_resource', cptools.response_headers)
_d.log_tracebacks = Tool('before_error_response', cptools.log_traceback)
_d.log_headers = Tool('before_error_response', cptools.log_request_headers)
_d.log_hooks = Tool('on_end_request', cptools.log_hooks, priority=100)
_d.err_redirect = ErrorTool(cptools.redirect)
_d.etags = Tool('before_finalize', cptools.validate_etags, priority=75)
_d.decode = Tool('before_request_body', encoding.decode)
# the order of encoding, gzip, caching is important
_d.encode = Tool('before_handler', encoding.ResponseEncoder, priority=70)
_d.gzip = Tool('before_finalize', encoding.gzip, priority=80)
_d.staticdir = HandlerTool(static.staticdir)
_d.staticfile = HandlerTool(static.staticfile)
_d.sessions = SessionTool()
_d.xmlrpc = ErrorTool(_xmlrpc.on_error)
_d.caching = CachingTool('before_handler', _caching.get, 'caching')
_d.expires = Tool('before_finalize', _caching.expires)
_d.tidy = DeprecatedTool(
'before_finalize',
"The tidy tool has been removed from the standard distribution of "
"CherryPy. The most recent version can be found at "
"http://tools.cherrypy.org/browser.")
_d.nsgmls = DeprecatedTool(
'before_finalize',
"The nsgmls tool has been removed from the standard distribution of "
"CherryPy. The most recent version can be found at "
"http://tools.cherrypy.org/browser.")
_d.ignore_headers = Tool('before_request_body', cptools.ignore_headers)
_d.referer = Tool('before_request_body', cptools.referer)
_d.basic_auth = Tool('on_start_resource', auth.basic_auth)
_d.digest_auth = Tool('on_start_resource', auth.digest_auth)
_d.trailing_slash = Tool('before_handler', cptools.trailing_slash, priority=60)
_d.flatten = Tool('before_finalize', cptools.flatten)
_d.accept = Tool('on_start_resource', cptools.accept)
_d.redirect = Tool('on_start_resource', cptools.redirect)
_d.autovary = Tool('on_start_resource', cptools.autovary, priority=0)
_d.json_in = Tool('before_request_body', jsontools.json_in, priority=30)
_d.json_out = Tool('before_handler', jsontools.json_out, priority=30)
_d.auth_basic = Tool('before_handler', auth_basic.basic_auth, priority=1)
_d.auth_digest = Tool('before_handler', auth_digest.digest_auth, priority=1)
del _d, cptools, encoding, auth, static
| gpl-3.0 | 5,551,977,356,191,091,000 | 35.092628 | 79 | 0.623684 | false |
DARKPOP/external_chromium_org_third_party_skia | gm/rebaseline_server/download_actuals_test.py | 20 | 1491 | #!/usr/bin/python
"""
Copyright 2014 Google Inc.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
Test download.py
TODO(epoger): Create a command to update the expected results (in
self._output_dir_expected) when appropriate. For now, you should:
1. examine the results in self.output_dir_actual and make sure they are ok
2. rm -rf self._output_dir_expected
3. mv self.output_dir_actual self._output_dir_expected
Although, if you're using an SVN checkout, this will blow away .svn directories
within self._output_dir_expected, which wouldn't be good...
"""
# System-level imports
import os
# Must fix up PYTHONPATH before importing from within Skia
import rs_fixpypath # pylint: disable=W0611
# Imports from within Skia
from py.utils import url_utils
import base_unittest
import download_actuals
class DownloadTest(base_unittest.TestCase):
def test_fetch(self):
"""Tests fetch() of GM results from actual-results.json ."""
downloader = download_actuals.Download(
actuals_base_url=url_utils.create_filepath_url(
os.path.join(self.input_dir, 'gm-actuals')),
gm_actuals_root_url=url_utils.create_filepath_url(
os.path.join(self.input_dir, 'fake-gm-imagefiles')))
downloader.fetch(
builder_name='Test-Android-GalaxyNexus-SGX540-Arm7-Release',
dest_dir=self.output_dir_actual)
def main():
base_unittest.main(DownloadTest)
if __name__ == '__main__':
main()
| bsd-3-clause | 6,476,254,156,548,060,000 | 27.673077 | 79 | 0.720993 | false |
piiswrong/mxnet | python/mxnet/gluon/model_zoo/vision/densenet.py | 10 | 7848 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable= arguments-differ
"""DenseNet, implemented in Gluon."""
__all__ = ['DenseNet', 'densenet121', 'densenet161', 'densenet169', 'densenet201']
from ....context import cpu
from ...block import HybridBlock
from ... import nn
from ..custom_layers import HybridConcurrent, Identity
# Helpers
def _make_dense_block(num_layers, bn_size, growth_rate, dropout, stage_index):
out = nn.HybridSequential(prefix='stage%d_'%stage_index)
with out.name_scope():
for _ in range(num_layers):
out.add(_make_dense_layer(growth_rate, bn_size, dropout))
return out
def _make_dense_layer(growth_rate, bn_size, dropout):
new_features = nn.HybridSequential(prefix='')
new_features.add(nn.BatchNorm())
new_features.add(nn.Activation('relu'))
new_features.add(nn.Conv2D(bn_size * growth_rate, kernel_size=1, use_bias=False))
new_features.add(nn.BatchNorm())
new_features.add(nn.Activation('relu'))
new_features.add(nn.Conv2D(growth_rate, kernel_size=3, padding=1, use_bias=False))
if dropout:
new_features.add(nn.Dropout(dropout))
out = HybridConcurrent(concat_dim=1, prefix='')
out.add(Identity())
out.add(new_features)
return out
def _make_transition(num_output_features):
out = nn.HybridSequential(prefix='')
out.add(nn.BatchNorm())
out.add(nn.Activation('relu'))
out.add(nn.Conv2D(num_output_features, kernel_size=1, use_bias=False))
out.add(nn.AvgPool2D(pool_size=2, strides=2))
return out
# Net
class DenseNet(HybridBlock):
r"""Densenet-BC model from the
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.
Parameters
----------
num_init_features : int
Number of filters to learn in the first convolution layer.
growth_rate : int
Number of filters to add each layer (`k` in the paper).
block_config : list of int
List of integers for numbers of layers in each pooling block.
bn_size : int, default 4
Multiplicative factor for number of bottle neck layers.
(i.e. bn_size * k features in the bottleneck layer)
dropout : float, default 0
Rate of dropout after each dense layer.
classes : int, default 1000
Number of classification classes.
"""
def __init__(self, num_init_features, growth_rate, block_config,
bn_size=4, dropout=0, classes=1000, **kwargs):
super(DenseNet, self).__init__(**kwargs)
with self.name_scope():
self.features = nn.HybridSequential(prefix='')
self.features.add(nn.Conv2D(num_init_features, kernel_size=7,
strides=2, padding=3, use_bias=False))
self.features.add(nn.BatchNorm())
self.features.add(nn.Activation('relu'))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2, padding=1))
# Add dense blocks
num_features = num_init_features
for i, num_layers in enumerate(block_config):
self.features.add(_make_dense_block(num_layers, bn_size, growth_rate, dropout, i+1))
num_features = num_features + num_layers * growth_rate
if i != len(block_config) - 1:
self.features.add(_make_transition(num_features // 2))
num_features = num_features // 2
self.features.add(nn.BatchNorm())
self.features.add(nn.Activation('relu'))
self.features.add(nn.AvgPool2D(pool_size=7))
self.features.add(nn.Flatten())
self.output = nn.Dense(classes)
def hybrid_forward(self, F, x):
x = self.features(x)
x = self.output(x)
return x
# Specification
densenet_spec = {121: (64, 32, [6, 12, 24, 16]),
161: (96, 48, [6, 12, 36, 24]),
169: (64, 32, [6, 12, 32, 32]),
201: (64, 32, [6, 12, 48, 32])}
# Constructor
def get_densenet(num_layers, pretrained=False, ctx=cpu(), root='~/.mxnet/models', **kwargs):
r"""Densenet-BC model from the
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.
Parameters
----------
num_layers : int
Number of layers for the variant of densenet. Options are 121, 161, 169, 201.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
num_init_features, growth_rate, block_config = densenet_spec[num_layers]
net = DenseNet(num_init_features, growth_rate, block_config, **kwargs)
if pretrained:
from ..model_store import get_model_file
net.load_params(get_model_file('densenet%d'%(num_layers), root=root), ctx=ctx)
return net
def densenet121(**kwargs):
r"""Densenet-BC 121-layer model from the
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_densenet(121, **kwargs)
def densenet161(**kwargs):
r"""Densenet-BC 161-layer model from the
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_densenet(161, **kwargs)
def densenet169(**kwargs):
r"""Densenet-BC 169-layer model from the
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_densenet(169, **kwargs)
def densenet201(**kwargs):
r"""Densenet-BC 201-layer model from the
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_densenet(201, **kwargs)
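# Illustrative usage sketch (not part of the original file): building a
# DenseNet-121 and running a forward pass on a dummy batch; the input shape
# (1, 3, 224, 224) is an assumption matching the 7x7/stride-2 stem above.
#
#   import mxnet as mx
#   net = densenet121()
#   net.initialize()
#   out = net(mx.nd.zeros((1, 3, 224, 224)))   # -> shape (1, 1000)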
| apache-2.0 | -1,877,315,610,535,911,700 | 37.851485 | 100 | 0.650102 | false |
catapult-project/catapult | third_party/gsutil/third_party/pyu2f/pyu2f/convenience/baseauthenticator.py | 13 | 2202 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interface to handle end to end flow of U2F signing."""
import sys
class BaseAuthenticator(object):
"""Interface to handle end to end flow of U2F signing."""
def Authenticate(self, app_id, challenge_data,
print_callback=sys.stderr.write):
"""Authenticates app_id with a security key.
Executes the U2F authentication/signature flow with a security key.
Args:
app_id: The app_id to register the security key against.
challenge_data: List of dictionaries containing a RegisteredKey ('key')
and the raw challenge data ('challenge') for this key.
print_callback: Callback to print a message to the user. The callback
function takes one argument--the message to display.
Returns:
A dictionary with the following fields:
'clientData': url-safe base64 encoded ClientData JSON signed by the key.
'signatureData': url-safe base64 encoded signature.
'applicationId': application id.
'keyHandle': url-safe base64 encoded handle of the key used to sign.
Raises:
U2FError: There was some kind of problem with registration (e.g.
the device was already registered or there was a timeout waiting
for the test of user presence).
"""
raise NotImplementedError
def IsAvailable(self):
"""Indicates whether the authenticator implementation is available to sign.
The caller should not call Authenticate() if IsAvailable() returns False
Returns:
True if the authenticator is available to sign and False otherwise.
"""
raise NotImplementedError
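# Illustrative caller sketch (not part of the original file); the concrete
# authenticator class, app id, key and challenge values are hypothetical.
#
#   authenticator = SomeAuthenticator()       # a concrete BaseAuthenticator
#   if authenticator.IsAvailable():
#       result = authenticator.Authenticate(
#           'https://example.com/app-id',
#           [{'key': registered_key, 'challenge': raw_challenge}])
#       signature = result['signatureData']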
| bsd-3-clause | 2,227,190,522,359,021,000 | 36.965517 | 80 | 0.71753 | false |
jreut/Switcharoo | scraper/scraper/entryqueue.py | 2 | 2041 | # Copyright 2015 Adam Greenstein <[email protected]>
#
# Switcharoo Cartographer is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Switcharoo Cartographer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Switcharoo Cartographer. If not, see <http://www.gnu.org/licenses/>.
from data import Access, Entry, EntryError
from Queue import Queue
class EntryQueue(Queue):
def __init__(self, transverse, maxsize=0):
self.reddit = transverse.reddit
self.events = transverse.events
Queue.__init__(self, maxsize)
def _init(self, maxsize):
Queue._init(self, maxsize)
nodes = Access(self.events).get_entry(maxsize)
for node in nodes:
try:
self.queue.append(Entry(node['raw_url'], self.reddit))
self.events.on_adding_to_queue(node['raw_url'])
except EntryError:
# TODO Remove old entry from DB
pass
def _put(self, url):
try:
entry = Entry(url, self.reddit)
if self._is_unique(entry):
self.events.on_adding_to_queue(url)
self.queue.append(entry)
else:
self.events.on_not_adding_to_queue(url)
except EntryError:
self.events.on_not_adding_to_queue(url)
def _get(self):
return self.queue.popleft()
def _is_unique(self, entry):
# TODO Logic here to determine if new url found
if entry not in self.queue:
return Access(self.events).is_unique_entry(entry)
else:
return False | gpl-3.0 | -7,789,401,602,929,347,000 | 35.464286 | 82 | 0.641352 | false |
sunshine027/mic | mic/utils/misc.py | 5 | 35129 | #!/usr/bin/python -tt
#
# Copyright (c) 2010, 2011 Intel Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from __future__ import with_statement
import os
import sys
import time
import tempfile
import re
import shutil
import glob
import hashlib
import subprocess
import platform
import traceback
try:
import sqlite3 as sqlite
except ImportError:
import sqlite
try:
from xml.etree import cElementTree
except ImportError:
import cElementTree
xmlparse = cElementTree.parse
from mic import msger
from mic.utils.errors import CreatorError, SquashfsError
from mic.utils.fs_related import find_binary_path, makedirs
from mic.utils.grabber import myurlgrab
from mic.utils.proxy import get_proxy_for
from mic.utils import runner
from mic.utils import rpmmisc
from mic.utils.safeurl import SafeURL
RPM_RE = re.compile("(.*)\.(.*) (.*)-(.*)")
RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s"
SRPM_RE = re.compile("(.*)-(\d+.*)-(\d+\.\d+).src.rpm")
def build_name(kscfg, release=None, prefix = None, suffix = None):
"""Construct and return an image name string.
This is a utility function to help create sensible name and fslabel
strings. The name is constructed using the sans-prefix-and-extension
kickstart filename and the supplied prefix and suffix.
kscfg -- a path to a kickstart file
release -- a replacement to suffix for image release
prefix -- a prefix to prepend to the name; defaults to None, which causes
no prefix to be used
suffix -- a suffix to append to the name; defaults to None, which causes
a YYYYMMDDHHMM suffix to be used
    Note, if maxlen is less than len(suffix), you get to keep both pieces.
"""
name = os.path.basename(kscfg)
idx = name.rfind('.')
if idx >= 0:
name = name[:idx]
if release is not None:
suffix = ""
if prefix is None:
prefix = ""
if suffix is None:
suffix = time.strftime("%Y%m%d%H%M")
if name.startswith(prefix):
name = name[len(prefix):]
prefix = "%s-" % prefix if prefix else ""
suffix = "-%s" % suffix if suffix else ""
ret = prefix + name + suffix
return ret
def get_distro():
"""Detect linux distribution, support "meego"
"""
support_dists = ('SuSE',
'debian',
'fedora',
'redhat',
'centos',
'meego',
'moblin',
'tizen')
try:
(dist, ver, id) = platform.linux_distribution( \
supported_dists = support_dists)
except:
(dist, ver, id) = platform.dist( \
supported_dists = support_dists)
return (dist, ver, id)
def get_hostname():
"""Get hostname
"""
return platform.node()
def get_hostname_distro_str():
"""Get composited string for current linux distribution
"""
(dist, ver, id) = get_distro()
hostname = get_hostname()
if not dist:
return "%s(Unknown Linux Distribution)" % hostname
else:
distro_str = ' '.join(map(str.strip, (hostname, dist, ver, id)))
return distro_str.strip()
_LOOP_RULE_PTH = None
def hide_loopdev_presentation():
udev_rules = "80-prevent-loop-present.rules"
udev_rules_dir = [
'/usr/lib/udev/rules.d/',
'/lib/udev/rules.d/',
'/etc/udev/rules.d/'
]
global _LOOP_RULE_PTH
for rdir in udev_rules_dir:
if os.path.exists(rdir):
_LOOP_RULE_PTH = os.path.join(rdir, udev_rules)
if not _LOOP_RULE_PTH:
return
try:
with open(_LOOP_RULE_PTH, 'w') as wf:
wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"')
runner.quiet('udevadm trigger')
except:
pass
def unhide_loopdev_presentation():
global _LOOP_RULE_PTH
if not _LOOP_RULE_PTH:
return
try:
os.unlink(_LOOP_RULE_PTH)
runner.quiet('udevadm trigger')
except:
pass
def extract_rpm(rpmfile, targetdir):
rpm2cpio = find_binary_path("rpm2cpio")
cpio = find_binary_path("cpio")
olddir = os.getcwd()
os.chdir(targetdir)
msger.verbose("Extract rpm file with cpio: %s" % rpmfile)
p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE)
p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p1.stdout.close()
(sout, serr) = p2.communicate()
msger.verbose(sout or serr)
os.chdir(olddir)
def human_size(size):
"""Return human readable string for Bytes size
"""
if size <= 0:
return "0M"
import math
measure = ['B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
expo = int(math.log(size, 1024))
mant = float(size/math.pow(1024, expo))
return "{0:.1f}{1:s}".format(mant, measure[expo])
def get_block_size(file_obj):
""" Returns block size for file object 'file_obj'. Errors are indicated by
the 'IOError' exception. """
from fcntl import ioctl
import struct
# Get the block size of the host file-system for the image file by calling
# the FIGETBSZ ioctl (number 2).
binary_data = ioctl(file_obj, 2, struct.pack('I', 0))
return struct.unpack('I', binary_data)[0]
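# Illustrative usage (not in the original source); the image path is hypothetical:
#   with open('/var/tmp/image.raw', 'rb') as img:
#       bsize = get_block_size(img)   # block size of the host fs, in bytes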
def check_space_pre_cp(src, dst):
"""Check whether disk space is enough before 'cp' like
operations, else exception will be raised.
"""
srcsize = get_file_size(src) * 1024 * 1024
freesize = get_filesystem_avail(dst)
if srcsize > freesize:
raise CreatorError("space on %s(%s) is not enough for about %s files"
% (dst, human_size(freesize), human_size(srcsize)))
def calc_hashes(file_path, hash_names, start = 0, end = None):
""" Calculate hashes for a file. The 'file_path' argument is the file
to calculate hash functions for, 'start' and 'end' are the starting and
    ending file offsets to calculate the hash functions for. The 'hash_names'
    argument is a list of hash names to calculate. Returns the list
of calculated hash values in the hexadecimal form in the same order
as 'hash_names'.
"""
if end == None:
end = os.path.getsize(file_path)
chunk_size = 65536
to_read = end - start
read = 0
hashes = []
for hash_name in hash_names:
hashes.append(hashlib.new(hash_name))
with open(file_path, "rb") as f:
f.seek(start)
while read < to_read:
if read + chunk_size > to_read:
chunk_size = to_read - read
chunk = f.read(chunk_size)
for hash_obj in hashes:
hash_obj.update(chunk)
read += chunk_size
result = []
for hash_obj in hashes:
result.append(hash_obj.hexdigest())
return result
def get_md5sum(fpath):
return calc_hashes(fpath, ('md5', ))[0]
def get_sha1sum(fpath):
return calc_hashes(fpath, ('sha1', ))[0]
def get_sha256sum(fpath):
return calc_hashes(fpath, ('sha256', ))[0]
def normalize_ksfile(ksconf, release, arch):
'''
Return the name of a normalized ks file in which macro variables
    @BUILD_ID@ and @ARCH@ are replaced with real values.
The original ks file is returned if no special macro is used, otherwise
a temp file is created and returned, which will be deleted when program
exits normally.
'''
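    # Illustrative behaviour (not in the original source): a ks line such as
    #   repo --name=base --baseurl=http://example.com/@BUILD_ID@/repos/@ARCH@/packages
    # normalized with release='20140310.3' and arch='i586' is written to a temp
    # copy with @BUILD_ID@ -> '20140310.3' and @ARCH@ -> 'ia32'.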
if not release:
release = "latest"
if not arch or re.match(r'i.86', arch):
arch = "ia32"
with open(ksconf) as f:
ksc = f.read()
if "@ARCH@" not in ksc and "@BUILD_ID@" not in ksc:
return ksconf
msger.info("Substitute macro variable @BUILD_ID@/@ARCH@ in ks: %s" % ksconf)
ksc = ksc.replace("@ARCH@", arch)
ksc = ksc.replace("@BUILD_ID@", release)
fd, ksconf = tempfile.mkstemp(prefix=os.path.basename(ksconf))
os.write(fd, ksc)
os.close(fd)
msger.debug('normalized ks file:%s' % ksconf)
def remove_temp_ks():
try:
os.unlink(ksconf)
except OSError, err:
msger.warning('Failed to remove temp ks file:%s:%s' % (ksconf, err))
import atexit
atexit.register(remove_temp_ks)
return ksconf
def _check_mic_chroot(rootdir):
def _path(path):
return rootdir.rstrip('/') + path
release_files = map(_path, [ "/etc/moblin-release",
"/etc/meego-release",
"/etc/tizen-release"])
if not any(map(os.path.exists, release_files)):
msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir)
if not glob.glob(rootdir + "/boot/vmlinuz-*"):
msger.warning("Failed to find kernel module under %s" % rootdir)
return
def selinux_check(arch, fstypes):
try:
getenforce = find_binary_path('getenforce')
except CreatorError:
return
selinux_status = runner.outs([getenforce])
if arch and arch.startswith("arm") and selinux_status == "Enforcing":
raise CreatorError("Can't create arm image if selinux is enabled, "
"please run 'setenforce 0' to disable selinux")
use_btrfs = filter(lambda typ: typ == 'btrfs', fstypes)
if use_btrfs and selinux_status == "Enforcing":
raise CreatorError("Can't create btrfs image if selinux is enabled,"
" please run 'setenforce 0' to disable selinux")
def get_image_type(path):
def _get_extension_name(path):
match = re.search("(?<=\.)\w+$", path)
if match:
return match.group(0)
else:
return None
if os.path.isdir(path):
_check_mic_chroot(path)
return "fs"
maptab = {
"tar": "loop",
"raw":"raw",
"vmdk":"vmdk",
"vdi":"vdi",
"iso":"livecd",
"usbimg":"liveusb",
}
extension = _get_extension_name(path)
if extension in maptab:
return maptab[extension]
fd = open(path, "rb")
file_header = fd.read(1024)
fd.close()
vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
if file_header[0:len(vdi_flag)] == vdi_flag:
return maptab["vdi"]
output = runner.outs(['file', path])
isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
usbimgptn = re.compile(r".*x86 boot sector.*active.*")
rawptn = re.compile(r".*x86 boot sector.*")
vmdkptn = re.compile(r".*VMware. disk image.*")
ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*")
btrfsimgptn = re.compile(r".*BTRFS.*")
if isoptn.match(output):
return maptab["iso"]
elif usbimgptn.match(output):
return maptab["usbimg"]
elif rawptn.match(output):
return maptab["raw"]
elif vmdkptn.match(output):
return maptab["vmdk"]
elif ext3fsimgptn.match(output):
return "ext3fsimg"
elif ext4fsimgptn.match(output):
return "ext4fsimg"
elif btrfsimgptn.match(output):
return "btrfsimg"
else:
raise CreatorError("Cannot detect the type of image: %s" % path)
def get_file_size(filename):
""" Return size in MB unit """
cmd = ['du', "-s", "-b", "-B", "1M", filename]
rc, duOutput = runner.runtool(cmd)
if rc != 0:
raise CreatorError("Failed to run: %s" % ' '.join(cmd))
size1 = int(duOutput.split()[0])
cmd = ['du', "-s", "-B", "1M", filename]
rc, duOutput = runner.runtool(cmd)
if rc != 0:
raise CreatorError("Failed to run: %s" % ' '.join(cmd))
size2 = int(duOutput.split()[0])
return max(size1, size2)
def get_filesystem_avail(fs):
vfstat = os.statvfs(fs)
return vfstat.f_bavail * vfstat.f_bsize
def convert_image(srcimg, srcfmt, dstimg, dstfmt):
#convert disk format
if dstfmt != "raw":
raise CreatorError("Invalid destination image format: %s" % dstfmt)
msger.debug("converting %s image to %s" % (srcimg, dstimg))
if srcfmt == "vmdk":
path = find_binary_path("qemu-img")
argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg]
elif srcfmt == "vdi":
path = find_binary_path("VBoxManage")
argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
else:
raise CreatorError("Invalid soure image format: %s" % srcfmt)
rc = runner.show(argv)
if rc == 0:
msger.debug("convert successful")
if rc != 0:
raise CreatorError("Unable to convert disk to %s" % dstfmt)
def uncompress_squashfs(squashfsimg, outdir):
"""Uncompress file system from squshfs image"""
unsquashfs = find_binary_path("unsquashfs")
args = [ unsquashfs, "-d", outdir, squashfsimg ]
rc = runner.show(args)
if (rc != 0):
raise SquashfsError("Failed to uncompress %s." % squashfsimg)
def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
""" FIXME: use the dir in mic.conf instead """
makedirs(dir)
return tempfile.mkdtemp(dir = dir, prefix = prefix)
def get_repostrs_from_ks(ks):
def _get_temp_reponame(baseurl):
md5obj = hashlib.md5(baseurl)
tmpreponame = "%s" % md5obj.hexdigest()
return tmpreponame
kickstart_repos = []
for repodata in ks.handler.repo.repoList:
repo = {}
for attr in ('name',
'baseurl',
'mirrorlist',
'includepkgs', # val is list
'excludepkgs', # val is list
'cost', # int
'priority',# int
'save',
'proxy',
'proxyuser',
'proxypasswd',
'proxypasswd',
'debuginfo',
'source',
'gpgkey',
'ssl_verify'):
if hasattr(repodata, attr) and getattr(repodata, attr):
repo[attr] = getattr(repodata, attr)
if 'name' not in repo:
repo['name'] = _get_temp_reponame(repodata.baseurl)
if hasattr(repodata, 'baseurl') and getattr(repodata, 'baseurl'):
repo['baseurl'] = SafeURL(getattr(repodata, 'baseurl'),
getattr(repodata, 'user', None),
getattr(repodata, 'passwd', None))
kickstart_repos.append(repo)
return kickstart_repos
def _get_uncompressed_data_from_url(url, filename, proxies):
filename = myurlgrab(url.full, filename, proxies)
suffix = None
if filename.endswith(".gz"):
suffix = ".gz"
runner.quiet(['gunzip', "-f", filename])
elif filename.endswith(".bz2"):
suffix = ".bz2"
runner.quiet(['bunzip2', "-f", filename])
if suffix:
filename = filename.replace(suffix, "")
return filename
def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename,
sumtype=None, checksum=None):
url = baseurl.join(filename)
filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"):
filename = os.path.splitext(filename_tmp)[0]
else:
filename = filename_tmp
if sumtype and checksum and os.path.exists(filename):
try:
sumcmd = find_binary_path("%ssum" % sumtype)
except:
file_checksum = None
else:
file_checksum = runner.outs([sumcmd, filename]).split()[0]
if file_checksum and file_checksum == checksum:
return filename
return _get_uncompressed_data_from_url(url,filename_tmp,proxies)
def get_metadata_from_repos(repos, cachedir):
my_repo_metadata = []
for repo in repos:
reponame = repo.name
baseurl = repo.baseurl
if hasattr(repo, 'proxy'):
proxy = repo.proxy
else:
proxy = get_proxy_for(baseurl)
proxies = None
if proxy:
proxies = {str(baseurl.split(":")[0]): str(proxy)}
makedirs(os.path.join(cachedir, reponame))
url = baseurl.join("repodata/repomd.xml")
filename = os.path.join(cachedir, reponame, 'repomd.xml')
repomd = myurlgrab(url.full, filename, proxies)
try:
root = xmlparse(repomd)
except SyntaxError:
raise CreatorError("repomd.xml syntax error.")
ns = root.getroot().tag
ns = ns[0:ns.rindex("}")+1]
filepaths = {}
checksums = {}
sumtypes = {}
for elm in root.getiterator("%sdata" % ns):
if elm.attrib["type"] == "patterns":
filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href']
checksums['patterns'] = elm.find("%sopen-checksum" % ns).text
sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type']
break
for elm in root.getiterator("%sdata" % ns):
if elm.attrib["type"] in ("group_gz", "group"):
filepaths['comps'] = elm.find("%slocation" % ns).attrib['href']
checksums['comps'] = elm.find("%sopen-checksum" % ns).text
sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type']
break
primary_type = None
for elm in root.getiterator("%sdata" % ns):
if elm.attrib["type"] in ("primary_db", "primary"):
primary_type = elm.attrib["type"]
filepaths['primary'] = elm.find("%slocation" % ns).attrib['href']
checksums['primary'] = elm.find("%sopen-checksum" % ns).text
sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type']
break
if not primary_type:
continue
for item in ("primary", "patterns", "comps"):
if item not in filepaths:
filepaths[item] = None
continue
if not filepaths[item]:
continue
filepaths[item] = _get_metadata_from_repo(baseurl,
proxies,
cachedir,
reponame,
filepaths[item],
sumtypes[item],
checksums[item])
""" Get repo key """
try:
repokey = _get_metadata_from_repo(baseurl,
proxies,
cachedir,
reponame,
"repodata/repomd.xml.key")
except CreatorError:
repokey = None
msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key"))
my_repo_metadata.append({"name":reponame,
"baseurl":baseurl,
"repomd":repomd,
"primary":filepaths['primary'],
"cachedir":cachedir,
"proxies":proxies,
"patterns":filepaths['patterns'],
"comps":filepaths['comps'],
"repokey":repokey})
return my_repo_metadata
def get_rpmver_in_repo(repometadata):
for repo in repometadata:
if repo["primary"].endswith(".xml"):
root = xmlparse(repo["primary"])
ns = root.getroot().tag
ns = ns[0:ns.rindex("}")+1]
versionlist = []
for elm in root.getiterator("%spackage" % ns):
if elm.find("%sname" % ns).text == 'rpm':
for node in elm.getchildren():
if node.tag == "%sversion" % ns:
versionlist.append(node.attrib['ver'])
if versionlist:
return reversed(
sorted(
versionlist,
key = lambda ver: map(int, ver.split('.')))).next()
elif repo["primary"].endswith(".sqlite"):
con = sqlite.connect(repo["primary"])
for row in con.execute("select version from packages where "
"name=\"rpm\" ORDER by version DESC"):
con.close()
return row[0]
return None
def get_arch(repometadata):
archlist = []
for repo in repometadata:
if repo["primary"].endswith(".xml"):
root = xmlparse(repo["primary"])
ns = root.getroot().tag
ns = ns[0:ns.rindex("}")+1]
for elm in root.getiterator("%spackage" % ns):
if elm.find("%sarch" % ns).text not in ("noarch", "src"):
arch = elm.find("%sarch" % ns).text
if arch not in archlist:
archlist.append(arch)
elif repo["primary"].endswith(".sqlite"):
con = sqlite.connect(repo["primary"])
for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
if row[0] not in archlist:
archlist.append(row[0])
con.close()
uniq_arch = []
for i in range(len(archlist)):
if archlist[i] not in rpmmisc.archPolicies.keys():
continue
need_append = True
j = 0
while j < len(uniq_arch):
if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'):
need_append = False
break
if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'):
if need_append:
uniq_arch[j] = archlist[i]
need_append = False
else:
uniq_arch.remove(uniq_arch[j])
continue
j += 1
if need_append:
uniq_arch.append(archlist[i])
return uniq_arch, archlist
def get_package(pkg, repometadata, arch = None):
ver = ""
target_repo = None
if not arch:
arches = []
elif arch not in rpmmisc.archPolicies:
arches = [arch]
else:
arches = rpmmisc.archPolicies[arch].split(':')
arches.append('noarch')
for repo in repometadata:
if repo["primary"].endswith(".xml"):
root = xmlparse(repo["primary"])
ns = root.getroot().tag
ns = ns[0:ns.rindex("}")+1]
for elm in root.getiterator("%spackage" % ns):
if elm.find("%sname" % ns).text == pkg:
if elm.find("%sarch" % ns).text in arches:
version = elm.find("%sversion" % ns)
tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
if tmpver > ver:
ver = tmpver
location = elm.find("%slocation" % ns)
pkgpath = "%s" % location.attrib['href']
target_repo = repo
break
if repo["primary"].endswith(".sqlite"):
con = sqlite.connect(repo["primary"])
if arch:
sql = 'select version, release, location_href from packages ' \
'where name = "%s" and arch IN ("%s")' % \
(pkg, '","'.join(arches))
for row in con.execute(sql):
tmpver = "%s-%s" % (row[0], row[1])
if tmpver > ver:
ver = tmpver
pkgpath = "%s" % row[2]
target_repo = repo
break
else:
sql = 'select version, release, location_href from packages ' \
'where name = "%s"' % pkg
for row in con.execute(sql):
tmpver = "%s-%s" % (row[0], row[1])
if tmpver > ver:
ver = tmpver
pkgpath = "%s" % row[2]
target_repo = repo
break
con.close()
if target_repo:
makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"]))
url = target_repo["baseurl"].join(pkgpath)
filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
if os.path.exists(filename):
ret = rpmmisc.checkRpmIntegrity('rpm', filename)
if ret == 0:
return filename
msger.warning("package %s is damaged: %s" %
(os.path.basename(filename), filename))
os.unlink(filename)
pkg = myurlgrab(url.full, filename, target_repo["proxies"])
return pkg
else:
return None
def get_source_name(pkg, repometadata):
def get_bin_name(pkg):
m = RPM_RE.match(pkg)
if m:
return m.group(1)
return None
def get_src_name(srpm):
m = SRPM_RE.match(srpm)
if m:
return m.group(1)
return None
ver = ""
target_repo = None
pkg_name = get_bin_name(pkg)
if not pkg_name:
return None
for repo in repometadata:
if repo["primary"].endswith(".xml"):
root = xmlparse(repo["primary"])
ns = root.getroot().tag
ns = ns[0:ns.rindex("}")+1]
for elm in root.getiterator("%spackage" % ns):
if elm.find("%sname" % ns).text == pkg_name:
if elm.find("%sarch" % ns).text != "src":
version = elm.find("%sversion" % ns)
tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
if tmpver > ver:
ver = tmpver
fmt = elm.find("%sformat" % ns)
if fmt:
fns = fmt.getchildren()[0].tag
fns = fns[0:fns.rindex("}")+1]
pkgpath = fmt.find("%ssourcerpm" % fns).text
target_repo = repo
break
if repo["primary"].endswith(".sqlite"):
con = sqlite.connect(repo["primary"])
for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
tmpver = "%s-%s" % (row[0], row[1])
                if tmpver > ver:
                    ver = tmpver
                    pkgpath = "%s" % row[2]
                    target_repo = repo
break
con.close()
if target_repo:
return get_src_name(pkgpath)
else:
return None
def get_pkglist_in_patterns(group, patterns):
found = False
pkglist = []
try:
root = xmlparse(patterns)
except SyntaxError:
raise SyntaxError("%s syntax error." % patterns)
for elm in list(root.getroot()):
ns = elm.tag
ns = ns[0:ns.rindex("}")+1]
name = elm.find("%sname" % ns)
summary = elm.find("%ssummary" % ns)
if name.text == group or summary.text == group:
found = True
break
if not found:
return pkglist
found = False
for requires in list(elm):
if requires.tag.endswith("requires"):
found = True
break
if not found:
return pkglist
for pkg in list(requires):
pkgname = pkg.attrib["name"]
if pkgname not in pkglist:
pkglist.append(pkgname)
return pkglist
def get_pkglist_in_comps(group, comps):
found = False
pkglist = []
try:
root = xmlparse(comps)
except SyntaxError:
raise SyntaxError("%s syntax error." % comps)
for elm in root.getiterator("group"):
id = elm.find("id")
name = elm.find("name")
if id.text == group or name.text == group:
packagelist = elm.find("packagelist")
found = True
break
if not found:
return pkglist
for require in elm.getiterator("packagereq"):
if require.tag.endswith("packagereq"):
pkgname = require.text
if pkgname not in pkglist:
pkglist.append(pkgname)
return pkglist
def is_statically_linked(binary):
return ", statically linked, " in runner.outs(['file', binary])
def setup_qemu_emulator(rootdir, arch):
# mount binfmt_misc if it doesn't exist
if not os.path.exists("/proc/sys/fs/binfmt_misc"):
modprobecmd = find_binary_path("modprobe")
runner.show([modprobecmd, "binfmt_misc"])
if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
mountcmd = find_binary_path("mount")
runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
    # qemu_emulator is a special case; we can't use find_binary_path here.
    # The qemu emulator should be a statically-linked executable file.
if arch == "aarch64":
node = "/proc/sys/fs/binfmt_misc/aarch64"
if os.path.exists("/usr/bin/qemu-arm64") and is_statically_linked("/usr/bin/qemu-arm64"):
arm_binary = "qemu-arm64"
elif os.path.exists("/usr/bin/qemu-aarch64") and is_statically_linked("/usr/bin/qemu-aarch64"):
arm_binary = "qemu-aarch64"
elif os.path.exists("/usr/bin/qemu-arm64-static"):
arm_binary = "qemu-arm64-static"
elif os.path.exists("/usr/bin/qemu-aarch64-static"):
arm_binary = "qemu-aarch64-static"
else:
raise CreatorError("Please install a statically-linked %s" % arm_binary)
elif arch == "mipsel":
node = "/proc/sys/fs/binfmt_misc/mipsel"
arm_binary = "qemu-mipsel"
if not os.path.exists("/usr/bin/%s" % arm_binary) or not is_statically_linked("/usr/bin/%s"):
arm_binary = "qemu-mipsel-static"
if not os.path.exists("/usr/bin/%s" % arm_binary):
raise CreatorError("Please install a statically-linked %s" % arm_binary)
else:
node = "/proc/sys/fs/binfmt_misc/arm"
arm_binary = "qemu-arm"
if not os.path.exists("/usr/bin/qemu-arm") or not is_statically_linked("/usr/bin/qemu-arm"):
arm_binary = "qemu-arm-static"
if not os.path.exists("/usr/bin/%s" % arm_binary):
raise CreatorError("Please install a statically-linked %s" % arm_binary)
qemu_emulator = "/usr/bin/%s" % arm_binary
if not os.path.exists(rootdir + "/usr/bin"):
makedirs(rootdir + "/usr/bin")
shutil.copy(qemu_emulator, rootdir + qemu_emulator)
    # disable selinux; selinux would block the qemu emulator from running
if os.path.exists("/usr/sbin/setenforce"):
msger.info('Try to disable selinux')
runner.show(["/usr/sbin/setenforce", "0"])
# unregister it if it has been registered and is a dynamically-linked executable
if os.path.exists(node):
qemu_unregister_string = "-1\n"
with open(node, "w") as fd:
fd.write(qemu_unregister_string)
# register qemu emulator for interpreting other arch executable file
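    # The register string uses the kernel binfmt_misc format
    # ":name:type:offset:magic:mask:interpreter:flags"; type "M" matches the
    # foreign ELF header magic/mask below and points the kernel at the
    # statically-linked qemu binary copied into the rootfs above.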
if not os.path.exists(node):
if arch == "aarch64":
qemu_arm_string = ":aarch64:M::\\x7fELF\\x02\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\xb7:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff:%s:\n" % qemu_emulator
elif arch == "mipsel":
qemu_arm_string = ":mipsel:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x08\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xfe\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfe\\xff\\xff\\xff:%s:\n" % qemu_emulator
else:
qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
with open("/proc/sys/fs/binfmt_misc/register", "w") as fd:
fd.write(qemu_arm_string)
return qemu_emulator
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
def get_source_repometadata(repometadata):
src_repometadata=[]
for repo in repometadata:
if repo["name"].endswith("-source"):
src_repometadata.append(repo)
if src_repometadata:
return src_repometadata
return None
def get_src_name(srpm):
m = SRPM_RE.match(srpm)
if m:
return m.group(1)
return None
src_repometadata = get_source_repometadata(repometadata)
if not src_repometadata:
msger.warning("No source repo found")
return None
src_pkgs = []
lpkgs_dict = {}
lpkgs_path = []
for repo in src_repometadata:
cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
lpkgs_path += glob.glob(cachepath)
for lpkg in lpkgs_path:
lpkg_name = get_src_name(os.path.basename(lpkg))
lpkgs_dict[lpkg_name] = lpkg
localpkgs = lpkgs_dict.keys()
cached_count = 0
destdir = instroot+'/usr/src/SRPMS'
if not os.path.exists(destdir):
os.makedirs(destdir)
srcpkgset = set()
for _pkg in pkgs:
srcpkg_name = get_source_name(_pkg, repometadata)
if not srcpkg_name:
continue
srcpkgset.add(srcpkg_name)
for pkg in list(srcpkgset):
if pkg in localpkgs:
cached_count += 1
shutil.copy(lpkgs_dict[pkg], destdir)
src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
else:
src_pkg = get_package(pkg, src_repometadata, 'src')
if src_pkg:
shutil.copy(src_pkg, destdir)
src_pkgs.append(src_pkg)
msger.info("%d source packages gotten from cache" % cached_count)
return src_pkgs
def strip_end(text, suffix):
if not text.endswith(suffix):
return text
return text[:-len(suffix)]
| gpl-2.0 | -489,292,673,832,215,740 | 33.440196 | 254 | 0.546329 | false |
Endika/django | tests/multiple_database/models.py | 282 | 2472 | from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Review(models.Model):
source = models.CharField(max_length=100)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
def __str__(self):
return self.source
class Meta:
ordering = ('source',)
class PersonManager(models.Manager):
def get_by_natural_key(self, name):
return self.get(name=name)
@python_2_unicode_compatible
class Person(models.Model):
objects = PersonManager()
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class Meta:
ordering = ('name',)
# This book manager doesn't do anything interesting; it just
# exists to strip out the 'extra_arg' argument to certain
# calls. This argument is used to establish that the BookManager
# is actually getting used when it should be.
class BookManager(models.Manager):
def create(self, *args, **kwargs):
kwargs.pop('extra_arg', None)
return super(BookManager, self).create(*args, **kwargs)
def get_or_create(self, *args, **kwargs):
kwargs.pop('extra_arg', None)
return super(BookManager, self).get_or_create(*args, **kwargs)
@python_2_unicode_compatible
class Book(models.Model):
objects = BookManager()
title = models.CharField(max_length=100)
published = models.DateField()
authors = models.ManyToManyField(Person)
editor = models.ForeignKey(Person, models.SET_NULL, null=True, related_name='edited')
reviews = GenericRelation(Review)
pages = models.IntegerField(default=100)
def __str__(self):
return self.title
class Meta:
ordering = ('title',)
@python_2_unicode_compatible
class Pet(models.Model):
name = models.CharField(max_length=100)
owner = models.ForeignKey(Person, models.CASCADE)
def __str__(self):
return self.name
class Meta:
ordering = ('name',)
class UserProfile(models.Model):
user = models.OneToOneField(User, models.SET_NULL, null=True)
flavor = models.CharField(max_length=100)
class Meta:
ordering = ('flavor',)
| bsd-3-clause | -4,937,620,199,559,036,000 | 26.775281 | 89 | 0.690939 | false |
janewangfb/spark | examples/src/main/python/transitive_closure.py | 128 | 2408 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import sys
from random import Random
from pyspark.sql import SparkSession
numEdges = 200
numVertices = 100
rand = Random(42)
def generateGraph():
edges = set()
while len(edges) < numEdges:
src = rand.randrange(0, numVertices)
dst = rand.randrange(0, numVertices)
if src != dst:
edges.add((src, dst))
return edges
if __name__ == "__main__":
"""
Usage: transitive_closure [partitions]
"""
spark = SparkSession\
.builder\
.appName("PythonTransitiveClosure")\
.getOrCreate()
partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2
tc = spark.sparkContext.parallelize(generateGraph(), partitions).cache()
# Linear transitive closure: each round grows paths by one edge,
# by joining the graph's edges with the already-discovered paths.
# e.g. join the path (y, z) from the TC with the edge (x, y) from
# the graph to obtain the path (x, z).
# Because join() joins on keys, the edges are stored in reversed order.
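    # Worked example (illustrative): with graph edges {(1, 2), (2, 3)} the
    # reversed copies are {(2, 1), (3, 2)}; joining path (2, 3) with (2, 1) on
    # key 2 projects the new path (1, 3), the following round adds nothing new,
    # so nextCount stops growing and the loop exits.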
edges = tc.map(lambda x_y: (x_y[1], x_y[0]))
oldCount = 0
nextCount = tc.count()
while True:
oldCount = nextCount
# Perform the join, obtaining an RDD of (y, (z, x)) pairs,
# then project the result to obtain the new (x, z) paths.
new_edges = tc.join(edges).map(lambda __a_b: (__a_b[1][1], __a_b[1][0]))
tc = tc.union(new_edges).distinct().cache()
nextCount = tc.count()
if nextCount == oldCount:
break
print("TC has %i edges" % tc.count())
spark.stop()
| apache-2.0 | -8,044,983,217,867,184,000 | 31.540541 | 80 | 0.660714 | false |
YanTangZhai/tf | tensorflow/python/training/training_ops.py | 1 | 5435 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python wrappers for training ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.training import gen_training_ops
# pylint: disable=wildcard-import
from tensorflow.python.training.gen_training_ops import *
# pylint: enable=wildcard-import
# Shape functions for fused training ops
# --------------------------------------
#
# The fused training ops all have the same basic structure: they take
# one or more variables with the same shape, and emit a reference to
# the original variable (which has the same shape as the first
# input). In addition, they take one or more scalar tensors containing
# hyperparameters.
#
# The sparse ops take the gradients as a Python IndexedSlices, which
# means that the indices are a vector of length N, and the gradient
# values are a tensor whose size is the same as the original variable,
# except for the 0th dimension, which has size N.
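#
# For example (illustrative only): a sparse update that touches N = 3 rows of
# a [100, 10] variable arrives as an index vector of shape [3] and gradient
# values of shape [3, 10]; the Sparse* shape functions below merge exactly
# those shapes against the variable and accumulator inputs.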
def _AssertInputIsScalar(op, index):
"""Raises ValueError if `op.inputs[index]` is not scalar."""
op.inputs[index].get_shape().assert_is_compatible_with(tensor_shape.scalar())
@ops.RegisterShape("ApplyAdagrad")
def _ApplyAdagradShape(op):
"""Shape function for the ApplyAdagrad op."""
var_shape = op.inputs[0].get_shape()
accum_shape = op.inputs[1].get_shape().merge_with(var_shape)
_AssertInputIsScalar(op, 2) # lr
grad_shape = op.inputs[3].get_shape().merge_with(accum_shape)
return [grad_shape]
@ops.RegisterShape("ApplyAdam")
def _ApplyAdamShape(op):
"""Shape function for the ApplyAdam op."""
var_shape = op.inputs[0].get_shape()
m_shape = op.inputs[1].get_shape().merge_with(var_shape)
v_shape = op.inputs[2].get_shape().merge_with(m_shape)
_AssertInputIsScalar(op, 3) # beta1_power
_AssertInputIsScalar(op, 4) # beta2_power
_AssertInputIsScalar(op, 5) # lr
_AssertInputIsScalar(op, 6) # beta1
_AssertInputIsScalar(op, 7) # beta2
_AssertInputIsScalar(op, 8) # epsilon
grad_shape = op.inputs[9].get_shape().merge_with(v_shape)
return [grad_shape]
@ops.RegisterShape("ApplyMomentum")
def _ApplyMomentumShape(op):
"""Shape function for the ApplyMomentum op."""
var_shape = op.inputs[0].get_shape()
accum_shape = op.inputs[1].get_shape().merge_with(var_shape)
_AssertInputIsScalar(op, 2) # lr
grad_shape = op.inputs[3].get_shape().merge_with(accum_shape)
_AssertInputIsScalar(op, 4) # momentum
return [grad_shape]
@ops.RegisterShape("ApplyRMSProp")
def _ApplyRMSPropShape(op):
"""Shape function for the ApplyRMSProp op."""
var_shape = op.inputs[0].get_shape()
ms_shape = op.inputs[1].get_shape().merge_with(var_shape)
mom_shape = op.inputs[2].get_shape().merge_with(ms_shape)
_AssertInputIsScalar(op, 3) # lr
_AssertInputIsScalar(op, 4) # rho
_AssertInputIsScalar(op, 5) # momentum
_AssertInputIsScalar(op, 6) # epsilon
grad_shape = op.inputs[7].get_shape().merge_with(mom_shape)
return [grad_shape]
@ops.RegisterShape("ApplyGradientDescent")
def _ApplyGradientDescentShape(op):
"""Shape function for the ApplyGradientDescent op."""
var_shape = op.inputs[0].get_shape()
_AssertInputIsScalar(op, 1) # alpha
delta_shape = op.inputs[2].get_shape().merge_with(var_shape)
return [delta_shape]
@ops.RegisterShape("ApplyDistGradientDescent")
def _ApplyDistGradientDescentShape(op):
"""Shape function for the ApplyDistGradientDescent op."""
var_shape = op.inputs[0].get_shape()
_AssertInputIsScalar(op, 1) # alpha
delta_shape = op.inputs[2].get_shape().merge_with(var_shape)
return [delta_shape]
@ops.RegisterShape("SparseApplyAdagrad")
def _SparseApplyAdagradShape(op):
"""Shape function for the SparseApplyAdagrad op."""
var_shape = op.inputs[0].get_shape()
accum_shape = op.inputs[1].get_shape().merge_with(var_shape)
_AssertInputIsScalar(op, 2) # lr
grad_shape = op.inputs[3].get_shape().merge_with(
tensor_shape.TensorShape([None]).concatenate(accum_shape[1:]))
unused_indices_shape = op.inputs[4].get_shape().merge_with(
tensor_shape.vector(grad_shape[0]))
return [accum_shape]
@ops.RegisterShape("SparseApplyMomentum")
def _SparseApplyMomentumShape(op):
"""Shape function for the SparseApplyMomentum op."""
var_shape = op.inputs[0].get_shape()
accum_shape = op.inputs[1].get_shape().merge_with(var_shape)
_AssertInputIsScalar(op, 2) # lr
grad_shape = op.inputs[3].get_shape().merge_with(
tensor_shape.TensorShape([None]).concatenate(accum_shape[1:]))
unused_indices_shape = op.inputs[4].get_shape().merge_with(
tensor_shape.vector(grad_shape[0]))
_AssertInputIsScalar(op, 5) # momentum
return [accum_shape]
| apache-2.0 | 4,176,143,315,333,818,000 | 37.546099 | 80 | 0.711132 | false |
ashang/calibre | src/calibre/gui2/actions/preferences.py | 14 | 2810 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
from functools import partial
from PyQt5.Qt import QIcon, Qt
from calibre.gui2.actions import InterfaceAction
from calibre.gui2.preferences.main import Preferences
from calibre.gui2 import error_dialog, show_restart_warning
from calibre.constants import DEBUG, isosx
class PreferencesAction(InterfaceAction):
name = 'Preferences'
action_spec = (_('Preferences'), 'config.png', _('Configure calibre'), 'Ctrl+P')
action_add_menu = True
action_menu_clone_qaction = _('Change calibre behavior')
def genesis(self):
pm = self.qaction.menu()
cm = partial(self.create_menu_action, pm)
if isosx:
pm.addAction(QIcon(I('config.png')), _('Preferences'), self.do_config)
cm('welcome wizard', _('Run welcome wizard'),
icon='wizard.png', triggered=self.gui.run_wizard)
cm('plugin updater', _('Get plugins to enhance calibre'),
icon='plugins/plugin_updater.png', triggered=self.get_plugins)
if not DEBUG:
pm.addSeparator()
cm('restart', _('Restart in debug mode'), icon='debug.png',
triggered=self.debug_restart, shortcut='Ctrl+Shift+R')
self.preferences_menu = pm
for x in (self.gui.preferences_action, self.qaction):
x.triggered.connect(self.do_config)
def get_plugins(self):
from calibre.gui2.dialogs.plugin_updater import (PluginUpdaterDialog,
FILTER_NOT_INSTALLED)
d = PluginUpdaterDialog(self.gui,
initial_filter=FILTER_NOT_INSTALLED)
d.exec_()
if d.do_restart:
self.gui.quit(restart=True)
def do_config(self, checked=False, initial_plugin=None,
close_after_initial=False):
if self.gui.job_manager.has_jobs():
d = error_dialog(self.gui, _('Cannot configure'),
_('Cannot configure while there are running jobs.'))
d.exec_()
return
if self.gui.must_restart_before_config:
do_restart = show_restart_warning(_('Cannot configure before calibre is restarted.'))
if do_restart:
self.gui.quit(restart=True)
return
d = Preferences(self.gui, initial_plugin=initial_plugin,
close_after_initial=close_after_initial)
d.run_wizard_requested.connect(self.gui.run_wizard,
type=Qt.QueuedConnection)
d.exec_()
if d.do_restart:
self.gui.quit(restart=True)
def debug_restart(self, *args):
self.gui.quit(restart=True, debug_on_restart=True)
| gpl-3.0 | 3,919,851,317,819,322,400 | 37.493151 | 97 | 0.621352 | false |
mlq/fMBT | utils/setup.py | 5 | 4003 | #!/usr/bin/env python2
from distutils.core import setup, Extension
import os
import subprocess
import sys
def pkg_config(package):
if os.name == "nt":
if package == "MagickCore":
            # pkg-config cannot be used; try to find the needed libraries on the filesystem
import fnmatch
libraries = ["CORE_RL_magick_"]
library_dirs = []
include_dirs = []
missing_libs = set(["kernel32.lib"] + [l + ".lib" for l in libraries])
missing_headers = set(["magick/MagickCore.h"])
for rootdir, dirnames, filenames in os.walk(os.environ["ProgramFiles"]):
for library in sorted(missing_libs):
if fnmatch.filter(filenames, library) and not "x64" in rootdir:
library_dirs.append(rootdir)
missing_libs.remove(library)
if not missing_libs:
break
for header in missing_headers:
if fnmatch.filter(filenames, os.path.basename(header)):
if os.path.dirname(header) == "":
include_dirs.append(rootdir)
elif os.path.dirname(header) == os.path.basename(rootdir):
include_dirs.append(os.path.dirname(rootdir))
missing_headers.remove(header)
if not missing_headers:
break
if not missing_libs and not missing_headers:
break
ext_args = {
"libraries": libraries,
"library_dirs": sorted(set(library_dirs)),
"include_dirs": include_dirs,
}
else:
sys.stderr.write('Unknown build parameters for package "%s"\n' % (package,))
sys.exit(1)
else:
_pkg_config = os.getenv("PKG_CONFIG","pkg-config")
o = subprocess.check_output([_pkg_config, "--libs", "--cflags", package])
ext_args = {"libraries": [], "library_dirs": [], "include_dirs": [], "extra_compile_args": []}
for arg in o.split():
if arg.startswith("-L"):
ext_args["library_dirs"].append(arg[2:])
elif arg.startswith("-l"):
ext_args["libraries"].append(arg[2:])
elif arg.startswith("-I"):
ext_args["include_dirs"].append(arg[2:])
elif arg.startswith("-f") or arg.startswith("-D"):
ext_args["extra_compile_args"].append(arg)
else:
raise ValueError('Unexpected pkg-config output: "%s" in "%s"'
% (arg, o))
return ext_args
fmbt_utils_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),os.getenv("VPATH",""))
fmbt_dir = os.path.join(fmbt_utils_dir, "..")
version = (file(os.path.join(fmbt_dir, "configure.ac"), "r")
.readline()
.split(",")[1]
.replace(' ','')
.replace('[','')
.replace(']',''))
lines = [line.replace("\\","").strip()
for line in file(os.path.join(fmbt_utils_dir, "Makefile.am"))
if not "=" in line]
modules = [module.replace(".py","")
for module in lines[lines.index("# modules")+1:
lines.index("# end of modules")]]
scripts = lines[lines.index("# scripts")+1:
lines.index("# end of scripts")]
eye4graphcs_buildflags = pkg_config("MagickCore")
ext_eye4graphics = Extension('eye4graphics',
sources = ['eye4graphics.cc'],
**eye4graphcs_buildflags)
setup(name = 'fmbt-python',
version = version,
description = 'fMBT Python tools and libraries',
author = 'Antti Kervinen',
author_email = '[email protected]',
py_modules = modules,
scripts = scripts,
ext_modules = [ext_eye4graphics]
)
| lgpl-2.1 | -6,948,949,243,247,011,000 | 40.697917 | 102 | 0.513365 | false |
switchboardOp/ansible | lib/ansible/modules/network/netscaler/netscaler_server.py | 14 | 12592 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Citrix Systems
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: netscaler_server
short_description: Manage server configuration
description:
    - Manage the configuration of server entities.
- This module is intended to run either on the ansible control node or a bastion (jumpserver) with access to the actual netscaler instance.
version_added: "2.4.0"
author: George Nikolopoulos (@giorgos-nikolopoulos)
options:
name:
description:
- "Name for the server."
- >-
Must begin with an ASCII alphabetic or underscore C(_) character, and must contain only ASCII
alphanumeric, underscore C(_), hash C(#), period C(.), space C( ), colon C(:), at C(@), equals C(=), and hyphen C(-)
characters.
- "Can be changed after the name is created."
- "Minimum length = 1"
ipaddress:
description:
- >-
IPv4 or IPv6 address of the server. If you create an IP address based server, you can specify the
name of the server, instead of its IP address, when creating a service. Note: If you do not create a
server entry, the server IP address that you enter when you create a service becomes the name of the
server.
domain:
description:
- "Domain name of the server. For a domain based configuration, you must create the server first."
- "Minimum length = 1"
translationip:
description:
- "IP address used to transform the server's DNS-resolved IP address."
translationmask:
description:
- "The netmask of the translation ip."
domainresolveretry:
description:
- >-
Time, in seconds, for which the NetScaler appliance must wait, after DNS resolution fails, before
sending the next DNS query to resolve the domain name.
- "Minimum value = C(5)"
- "Maximum value = C(20939)"
default: 5
ipv6address:
description:
- >-
Support IPv6 addressing mode. If you configure a server with the IPv6 addressing mode, you cannot use
the server in the IPv4 addressing mode.
default: false
type: bool
comment:
description:
- "Any information about the server."
td:
description:
- >-
Integer value that uniquely identifies the traffic domain in which you want to configure the entity.
If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID
of 0.
- "Minimum value = C(0)"
- "Maximum value = C(4094)"
disabled:
description:
- When set to C(true) the server state will be set to DISABLED.
- When set to C(false) the server state will be set to ENABLED.
- >-
Note that due to limitations of the underlying NITRO API a C(disabled) state change alone
does not cause the module result to report a changed status.
type: bool
default: false
extends_documentation_fragment: netscaler
requirements:
- nitro python sdk
'''
EXAMPLES = '''
- name: Setup server
delegate_to: localhost
netscaler_server:
nsip: 172.18.0.2
nitro_user: nsroot
nitro_pass: nsroot
state: present
name: server-1
ipaddress: 192.168.1.1
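
# Hypothetical second task showing the documented disabled flag: the server is
# created (or updated) and then put into the DISABLED state on the NetScaler.
- name: Setup disabled server
  delegate_to: localhost
  netscaler_server:
    nsip: 172.18.0.2
    nitro_user: nsroot
    nitro_pass: nsroot
    state: present
    name: server-2
    ipaddress: 192.168.1.2
    disabled: true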
'''
RETURN = '''
loglines:
description: list of logged messages by the module
returned: always
type: list
sample: ['message 1', 'message 2']
msg:
description: Message detailing the failure reason
returned: failure
type: str
sample: "Action does not exist"
diff:
description: List of differences between the actual configured object and the configuration specified in the module
returned: failure
type: dict
sample: { 'targetlbvserver': 'difference. ours: (str) server1 other: (str) server2' }
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netscaler import ConfigProxy, get_nitro_client, netscaler_common_arguments, log, loglines, get_immutables_intersection
try:
from nssrc.com.citrix.netscaler.nitro.resource.config.basic.server import server
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
PYTHON_SDK_IMPORTED = True
except ImportError as e:
PYTHON_SDK_IMPORTED = False
def server_exists(client, module):
log('Checking if server exists')
if server.count_filtered(client, 'name:%s' % module.params['name']) > 0:
return True
else:
return False
def server_identical(client, module, server_proxy):
log('Checking if configured server is identical')
if server.count_filtered(client, 'name:%s' % module.params['name']) == 0:
return False
server_list = server.get_filtered(client, 'name:%s' % module.params['name'])
if server_proxy.has_equal_attributes(server_list[0]):
return True
else:
return False
def diff_list(client, module, server_proxy):
ret_val = server_proxy.diff_object(server.get_filtered(client, 'name:%s' % module.params['name'])[0]),
return ret_val[0]
def do_state_change(client, module, server_proxy):
if module.params['disabled']:
log('Disabling server')
result = server.disable(client, server_proxy.actual)
else:
log('Enabling server')
result = server.enable(client, server_proxy.actual)
return result
def main():
module_specific_arguments = dict(
name=dict(type='str'),
ipaddress=dict(type='str'),
domain=dict(type='str'),
translationip=dict(type='str'),
translationmask=dict(type='str'),
domainresolveretry=dict(type='int'),
ipv6address=dict(
type='bool',
default=False
),
comment=dict(type='str'),
td=dict(type='float'),
)
hand_inserted_arguments = dict(
disabled=dict(
type='bool',
default=False,
),
)
argument_spec = dict()
argument_spec.update(netscaler_common_arguments)
argument_spec.update(module_specific_arguments)
argument_spec.update(hand_inserted_arguments)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
module_result = dict(
changed=False,
failed=False,
loglines=loglines,
)
# Fail the module if imports failed
if not PYTHON_SDK_IMPORTED:
module.fail_json(msg='Could not load nitro python sdk')
# Fallthrough to rest of execution
client = get_nitro_client(module)
try:
client.login()
except nitro_exception as e:
msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg)
except Exception as e:
if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>":
module.fail_json(msg='Connection error %s' % str(e))
elif str(type(e)) == "<class 'requests.exceptions.SSLError'>":
module.fail_json(msg='SSL Error %s' % str(e))
else:
module.fail_json(msg='Unexpected error during login %s' % str(e))
# Instantiate Server Config object
readwrite_attrs = [
'name',
'ipaddress',
'domain',
'translationip',
'translationmask',
'domainresolveretry',
'ipv6address',
'comment',
'td',
]
readonly_attrs = [
'statechangetimesec',
'tickssincelaststatechange',
'autoscale',
'customserverid',
'monthreshold',
'maxclient',
'maxreq',
'maxbandwidth',
'usip',
'cka',
'tcpb',
'cmp',
'clttimeout',
'svrtimeout',
'cipheader',
'cip',
'cacheable',
'sc',
'sp',
'downstateflush',
'appflowlog',
'boundtd',
'__count',
]
immutable_attrs = [
'name',
'domain',
'ipv6address',
'td',
]
transforms = {
'ipv6address': ['bool_yes_no'],
}
server_proxy = ConfigProxy(
actual=server(),
client=client,
attribute_values_dict=module.params,
readwrite_attrs=readwrite_attrs,
readonly_attrs=readonly_attrs,
immutable_attrs=immutable_attrs,
transforms=transforms,
)
try:
# Apply appropriate state
if module.params['state'] == 'present':
log('Applying actions for state present')
if not server_exists(client, module):
if not module.check_mode:
server_proxy.add()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
elif not server_identical(client, module, server_proxy):
# Check if we try to change value of immutable attributes
immutables_changed = get_immutables_intersection(server_proxy, diff_list(client, module, server_proxy).keys())
if immutables_changed != []:
msg = 'Cannot update immutable attributes %s' % (immutables_changed,)
module.fail_json(msg=msg, diff=diff_list(client, module, server_proxy), **module_result)
if not module.check_mode:
server_proxy.update()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
if not module.check_mode:
res = do_state_change(client, module, server_proxy)
if res.errorcode != 0:
msg = 'Error when setting disabled state. errorcode: %s message: %s' % (res.errorcode, res.message)
module.fail_json(msg=msg, **module_result)
# Sanity check for result
log('Sanity checks for state present')
if not module.check_mode:
if not server_exists(client, module):
module.fail_json(msg='Server does not seem to exist', **module_result)
if not server_identical(client, module, server_proxy):
module.fail_json(
msg='Server is not configured according to parameters given',
diff=diff_list(client, module, server_proxy),
**module_result
)
elif module.params['state'] == 'absent':
log('Applying actions for state absent')
if server_exists(client, module):
if not module.check_mode:
server_proxy.delete()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
# Sanity check for result
log('Sanity checks for state absent')
if not module.check_mode:
if server_exists(client, module):
module.fail_json(msg='Server seems to be present', **module_result)
except nitro_exception as e:
msg = "nitro exception errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg, **module_result)
client.logout()
module.exit_json(**module_result)
if __name__ == "__main__":
main()
| gpl-3.0 | -7,921,138,041,867,247,000 | 31.621762 | 144 | 0.591487 | false |
tinloaf/home-assistant | homeassistant/components/binary_sensor/__init__.py | 4 | 3179 | """
Component to interface with binary sensors.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/binary_sensor/
"""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity import Entity
from homeassistant.const import (STATE_ON, STATE_OFF)
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
DOMAIN = 'binary_sensor'
SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + '.{}'
DEVICE_CLASSES = [
'battery', # On means low, Off means normal
'cold', # On means cold, Off means normal
'connectivity', # On means connected, Off means disconnected
'door', # On means open, Off means closed
'garage_door', # On means open, Off means closed
'gas', # On means gas detected, Off means no gas (clear)
'heat', # On means hot, Off means normal
'light', # On means light detected, Off means no light
'lock', # On means open (unlocked), Off means closed (locked)
'moisture', # On means wet, Off means dry
'motion', # On means motion detected, Off means no motion (clear)
'moving', # On means moving, Off means not moving (stopped)
'occupancy', # On means occupied, Off means not occupied (clear)
'opening', # On means open, Off means closed
'plug', # On means plugged in, Off means unplugged
'power', # On means power detected, Off means no power
'presence', # On means home, Off means away
'problem', # On means problem detected, Off means no problem (OK)
'safety', # On means unsafe, Off means safe
'smoke', # On means smoke detected, Off means no smoke (clear)
'sound', # On means sound detected, Off means no sound (clear)
'vibration', # On means vibration detected, Off means no vibration
'window', # On means open, Off means closed
]
DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES))
async def async_setup(hass, config):
"""Track states and offer events for binary sensors."""
component = hass.data[DOMAIN] = EntityComponent(
logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
await component.async_setup(config)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class BinarySensorDevice(Entity):
"""Represent a binary sensor."""
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return None
@property
def state(self):
"""Return the state of the binary sensor."""
return STATE_ON if self.is_on else STATE_OFF
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return None
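

# Illustrative sketch only (not shipped with this component): a platform
# implementation would typically subclass BinarySensorDevice like this,
# overriding is_on and device_class. The class name and constructor here are
# hypothetical.
class ExampleDoorSensor(BinarySensorDevice):
    """Demo binary sensor reporting a door as open (on) or closed (off)."""

    def __init__(self, name, is_open=False):
        """Initialize the demo door sensor."""
        self._name = name
        self._is_open = is_open

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def is_on(self):
        """Return true if the door is open."""
        return self._is_open

    @property
    def device_class(self):
        """Return 'door', one of the DEVICE_CLASSES listed above."""
        return 'door'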
| apache-2.0 | 1,237,085,062,904,746,000 | 35.965116 | 77 | 0.657754 | false |
TempSquad/TEMP2016-site-client | node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py | 2767 | 2174 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Applies a fix to CR LF TAB handling in xml.dom.
Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
Working around this: http://bugs.python.org/issue5752
TODO(bradnelson): Consider dropping this when we drop XP support.
"""
import xml.dom.minidom
def _Replacement_write_data(writer, data, is_attrib=False):
"""Writes datachars to writer."""
data = data.replace("&", "&").replace("<", "<")
data = data.replace("\"", """).replace(">", ">")
if is_attrib:
data = data.replace(
"\r", "
").replace(
"\n", "
").replace(
"\t", "	")
writer.write(data)
def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
# indent = current indentation
# addindent = indentation to add to higher levels
# newl = newline string
writer.write(indent+"<" + self.tagName)
attrs = self._get_attributes()
a_names = attrs.keys()
a_names.sort()
for a_name in a_names:
writer.write(" %s=\"" % a_name)
_Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
writer.write("\"")
if self.childNodes:
writer.write(">%s" % newl)
for node in self.childNodes:
node.writexml(writer, indent + addindent, addindent, newl)
writer.write("%s</%s>%s" % (indent, self.tagName, newl))
else:
writer.write("/>%s" % newl)
class XmlFix(object):
"""Object to manage temporary patching of xml.dom.minidom."""
def __init__(self):
# Preserve current xml.dom.minidom functions.
self.write_data = xml.dom.minidom._write_data
self.writexml = xml.dom.minidom.Element.writexml
# Inject replacement versions of a function and a method.
xml.dom.minidom._write_data = _Replacement_write_data
xml.dom.minidom.Element.writexml = _Replacement_writexml
def Cleanup(self):
if self.write_data:
xml.dom.minidom._write_data = self.write_data
xml.dom.minidom.Element.writexml = self.writexml
self.write_data = None
def __del__(self):
self.Cleanup()
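

# Illustrative usage sketch (assumed, not part of gyp itself): apply the patch
# while serializing a document, then restore the stock minidom behaviour.
if __name__ == '__main__':
  fix = XmlFix()
  try:
    doc = xml.dom.minidom.Document()
    root = doc.createElement('root')
    root.setAttribute('cmd', 'a\tb\nc')
    doc.appendChild(root)
    # With the patch active, the tab and newline in the attribute are written
    # as 	 and 
 instead of being mangled.
    print(root.toxml())
  finally:
    fix.Cleanup()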
| apache-2.0 | -7,195,404,887,630,765,000 | 30.507246 | 74 | 0.658234 | false |
ujjvala-addsol/addsol_hr | openerp/addons/base_report_designer/__init__.py | 421 | 1136 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import base_report_designer
import installer
import openerp_sxw2rml
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 3,931,537,070,822,987,000 | 42.692308 | 79 | 0.625 | false |
ParuninPavel/lenta4_hack | vkapp/contrib/sites/migrations/0003_set_site_domain_and_name.py | 1 | 1085 | """
To understand why this file is here, please read:
http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
"""Set site domain and name."""
Site = apps.get_model('sites', 'Site')
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
'domain': 'lentach.cleverbots.ru',
'name': 'lentach'
}
)
def update_site_backward(apps, schema_editor):
"""Revert site domain and name to default."""
Site = apps.get_model('sites', 'Site')
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
'domain': 'example.com',
'name': 'example.com'
}
)
class Migration(migrations.Migration):
dependencies = [
('sites', '0002_alter_domain_unique'),
]
operations = [
migrations.RunPython(update_site_forward, update_site_backward),
]
| mit | -5,036,127,893,043,715,000 | 24.833333 | 129 | 0.623963 | false |
sunlianqiang/kbengine | kbe/res/scripts/common/Lib/test/test_zipfile.py | 60 | 70441 | import io
import os
import sys
import importlib.util
import time
import struct
import zipfile
import unittest
from tempfile import TemporaryFile
from random import randint, random, getrandbits
from test.support import (TESTFN, findfile, unlink, rmtree,
requires_zlib, requires_bz2, requires_lzma,
captured_stdout, check_warnings)
TESTFN2 = TESTFN + "2"
TESTFNDIR = TESTFN + "d"
FIXEDTEST_SIZE = 1000
DATAFILES_DIR = 'zipfile_datafiles'
SMALL_TEST_DATA = [('_ziptest1', '1q2w3e4r5t'),
('ziptest2dir/_ziptest2', 'qawsedrftg'),
('ziptest2dir/ziptest3dir/_ziptest3', 'azsxdcfvgb'),
('ziptest2dir/ziptest3dir/ziptest4dir/_ziptest3', '6y7u8i9o0p')]
def get_files(test):
yield TESTFN2
with TemporaryFile() as f:
yield f
test.assertFalse(f.closed)
with io.BytesIO() as f:
yield f
test.assertFalse(f.closed)
def openU(zipfp, fn):
with check_warnings(('', DeprecationWarning)):
return zipfp.open(fn, 'rU')
class AbstractTestsWithSourceFile:
@classmethod
def setUpClass(cls):
cls.line_gen = [bytes("Zipfile test line %d. random float: %f\n" %
(i, random()), "ascii")
for i in range(FIXEDTEST_SIZE)]
cls.data = b''.join(cls.line_gen)
def setUp(self):
# Make a source file with some lines
with open(TESTFN, "wb") as fp:
fp.write(self.data)
def make_test_archive(self, f, compression):
# Create the ZIP archive
with zipfile.ZipFile(f, "w", compression) as zipfp:
zipfp.write(TESTFN, "another.name")
zipfp.write(TESTFN, TESTFN)
zipfp.writestr("strfile", self.data)
def zip_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r", compression) as zipfp:
self.assertEqual(zipfp.read(TESTFN), self.data)
self.assertEqual(zipfp.read("another.name"), self.data)
self.assertEqual(zipfp.read("strfile"), self.data)
# Print the ZIP directory
fp = io.StringIO()
zipfp.printdir(file=fp)
directory = fp.getvalue()
lines = directory.splitlines()
self.assertEqual(len(lines), 4) # Number of files + header
self.assertIn('File Name', lines[0])
self.assertIn('Modified', lines[0])
self.assertIn('Size', lines[0])
fn, date, time_, size = lines[1].split()
self.assertEqual(fn, 'another.name')
self.assertTrue(time.strptime(date, '%Y-%m-%d'))
self.assertTrue(time.strptime(time_, '%H:%M:%S'))
self.assertEqual(size, str(len(self.data)))
# Check the namelist
names = zipfp.namelist()
self.assertEqual(len(names), 3)
self.assertIn(TESTFN, names)
self.assertIn("another.name", names)
self.assertIn("strfile", names)
# Check infolist
infos = zipfp.infolist()
names = [i.filename for i in infos]
self.assertEqual(len(names), 3)
self.assertIn(TESTFN, names)
self.assertIn("another.name", names)
self.assertIn("strfile", names)
for i in infos:
self.assertEqual(i.file_size, len(self.data))
# check getinfo
for nm in (TESTFN, "another.name", "strfile"):
info = zipfp.getinfo(nm)
self.assertEqual(info.filename, nm)
self.assertEqual(info.file_size, len(self.data))
# Check that testzip doesn't raise an exception
zipfp.testzip()
def test_basic(self):
for f in get_files(self):
self.zip_test(f, self.compression)
def zip_open_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r", compression) as zipfp:
zipdata1 = []
with zipfp.open(TESTFN) as zipopen1:
while True:
read_data = zipopen1.read(256)
if not read_data:
break
zipdata1.append(read_data)
zipdata2 = []
with zipfp.open("another.name") as zipopen2:
while True:
read_data = zipopen2.read(256)
if not read_data:
break
zipdata2.append(read_data)
self.assertEqual(b''.join(zipdata1), self.data)
self.assertEqual(b''.join(zipdata2), self.data)
def test_open(self):
for f in get_files(self):
self.zip_open_test(f, self.compression)
def zip_random_open_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r", compression) as zipfp:
zipdata1 = []
with zipfp.open(TESTFN) as zipopen1:
while True:
read_data = zipopen1.read(randint(1, 1024))
if not read_data:
break
zipdata1.append(read_data)
self.assertEqual(b''.join(zipdata1), self.data)
def test_random_open(self):
for f in get_files(self):
self.zip_random_open_test(f, self.compression)
def zip_read1_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp, \
zipfp.open(TESTFN) as zipopen:
zipdata = []
while True:
read_data = zipopen.read1(-1)
if not read_data:
break
zipdata.append(read_data)
self.assertEqual(b''.join(zipdata), self.data)
def test_read1(self):
for f in get_files(self):
self.zip_read1_test(f, self.compression)
def zip_read1_10_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp, \
zipfp.open(TESTFN) as zipopen:
zipdata = []
while True:
read_data = zipopen.read1(10)
self.assertLessEqual(len(read_data), 10)
if not read_data:
break
zipdata.append(read_data)
self.assertEqual(b''.join(zipdata), self.data)
def test_read1_10(self):
for f in get_files(self):
self.zip_read1_10_test(f, self.compression)
def zip_readline_read_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp, \
zipfp.open(TESTFN) as zipopen:
data = b''
while True:
read = zipopen.readline()
if not read:
break
data += read
read = zipopen.read(100)
if not read:
break
data += read
self.assertEqual(data, self.data)
def test_readline_read(self):
# Issue #7610: calls to readline() interleaved with calls to read().
for f in get_files(self):
self.zip_readline_read_test(f, self.compression)
def zip_readline_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
with zipfp.open(TESTFN) as zipopen:
for line in self.line_gen:
linedata = zipopen.readline()
self.assertEqual(linedata, line)
def test_readline(self):
for f in get_files(self):
self.zip_readline_test(f, self.compression)
def zip_readlines_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
with zipfp.open(TESTFN) as zipopen:
ziplines = zipopen.readlines()
for line, zipline in zip(self.line_gen, ziplines):
self.assertEqual(zipline, line)
def test_readlines(self):
for f in get_files(self):
self.zip_readlines_test(f, self.compression)
def zip_iterlines_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
with zipfp.open(TESTFN) as zipopen:
for line, zipline in zip(self.line_gen, zipopen):
self.assertEqual(zipline, line)
def test_iterlines(self):
for f in get_files(self):
self.zip_iterlines_test(f, self.compression)
def test_low_compression(self):
"""Check for cases where compressed data is larger than original."""
# Create the ZIP archive
with zipfile.ZipFile(TESTFN2, "w", self.compression) as zipfp:
zipfp.writestr("strfile", '12')
# Get an open object for strfile
with zipfile.ZipFile(TESTFN2, "r", self.compression) as zipfp:
with zipfp.open("strfile") as openobj:
self.assertEqual(openobj.read(1), b'1')
self.assertEqual(openobj.read(1), b'2')
def test_writestr_compression(self):
zipfp = zipfile.ZipFile(TESTFN2, "w")
zipfp.writestr("b.txt", "hello world", compress_type=self.compression)
info = zipfp.getinfo('b.txt')
self.assertEqual(info.compress_type, self.compression)
def test_read_return_size(self):
# Issue #9837: ZipExtFile.read() shouldn't return more bytes
# than requested.
for test_size in (1, 4095, 4096, 4097, 16384):
file_size = test_size + 1
junk = getrandbits(8 * file_size).to_bytes(file_size, 'little')
with zipfile.ZipFile(io.BytesIO(), "w", self.compression) as zipf:
zipf.writestr('foo', junk)
with zipf.open('foo', 'r') as fp:
buf = fp.read(test_size)
self.assertEqual(len(buf), test_size)
def test_truncated_zipfile(self):
fp = io.BytesIO()
with zipfile.ZipFile(fp, mode='w') as zipf:
zipf.writestr('strfile', self.data, compress_type=self.compression)
end_offset = fp.tell()
zipfiledata = fp.getvalue()
fp = io.BytesIO(zipfiledata)
with zipfile.ZipFile(fp) as zipf:
with zipf.open('strfile') as zipopen:
fp.truncate(end_offset - 20)
with self.assertRaises(EOFError):
zipopen.read()
fp = io.BytesIO(zipfiledata)
with zipfile.ZipFile(fp) as zipf:
with zipf.open('strfile') as zipopen:
fp.truncate(end_offset - 20)
with self.assertRaises(EOFError):
while zipopen.read(100):
pass
fp = io.BytesIO(zipfiledata)
with zipfile.ZipFile(fp) as zipf:
with zipf.open('strfile') as zipopen:
fp.truncate(end_offset - 20)
with self.assertRaises(EOFError):
while zipopen.read1(100):
pass
def tearDown(self):
unlink(TESTFN)
unlink(TESTFN2)
class StoredTestsWithSourceFile(AbstractTestsWithSourceFile,
unittest.TestCase):
compression = zipfile.ZIP_STORED
test_low_compression = None
def zip_test_writestr_permissions(self, f, compression):
# Make sure that writestr creates files with mode 0600,
# when it is passed a name rather than a ZipInfo instance.
self.make_test_archive(f, compression)
with zipfile.ZipFile(f, "r") as zipfp:
zinfo = zipfp.getinfo('strfile')
self.assertEqual(zinfo.external_attr, 0o600 << 16)
def test_writestr_permissions(self):
for f in get_files(self):
self.zip_test_writestr_permissions(f, zipfile.ZIP_STORED)
def test_absolute_arcnames(self):
with zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED) as zipfp:
zipfp.write(TESTFN, "/absolute")
with zipfile.ZipFile(TESTFN2, "r", zipfile.ZIP_STORED) as zipfp:
self.assertEqual(zipfp.namelist(), ["absolute"])
def test_append_to_zip_file(self):
"""Test appending to an existing zipfile."""
with zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED) as zipfp:
zipfp.write(TESTFN, TESTFN)
with zipfile.ZipFile(TESTFN2, "a", zipfile.ZIP_STORED) as zipfp:
zipfp.writestr("strfile", self.data)
self.assertEqual(zipfp.namelist(), [TESTFN, "strfile"])
def test_append_to_non_zip_file(self):
"""Test appending to an existing file that is not a zipfile."""
# NOTE: this test fails if len(d) < 22 because of the first
# line "fpin.seek(-22, 2)" in _EndRecData
data = b'I am not a ZipFile!'*10
with open(TESTFN2, 'wb') as f:
f.write(data)
with zipfile.ZipFile(TESTFN2, "a", zipfile.ZIP_STORED) as zipfp:
zipfp.write(TESTFN, TESTFN)
with open(TESTFN2, 'rb') as f:
f.seek(len(data))
with zipfile.ZipFile(f, "r") as zipfp:
self.assertEqual(zipfp.namelist(), [TESTFN])
def test_ignores_newline_at_end(self):
with zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED) as zipfp:
zipfp.write(TESTFN, TESTFN)
with open(TESTFN2, 'a') as f:
f.write("\r\n\00\00\00")
with zipfile.ZipFile(TESTFN2, "r") as zipfp:
self.assertIsInstance(zipfp, zipfile.ZipFile)
def test_ignores_stuff_appended_past_comments(self):
with zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED) as zipfp:
zipfp.comment = b"this is a comment"
zipfp.write(TESTFN, TESTFN)
with open(TESTFN2, 'a') as f:
f.write("abcdef\r\n")
with zipfile.ZipFile(TESTFN2, "r") as zipfp:
self.assertIsInstance(zipfp, zipfile.ZipFile)
self.assertEqual(zipfp.comment, b"this is a comment")
def test_write_default_name(self):
"""Check that calling ZipFile.write without arcname specified
produces the expected result."""
with zipfile.ZipFile(TESTFN2, "w") as zipfp:
zipfp.write(TESTFN)
with open(TESTFN, "rb") as f:
self.assertEqual(zipfp.read(TESTFN), f.read())
def test_write_to_readonly(self):
"""Check that trying to call write() on a readonly ZipFile object
raises a RuntimeError."""
with zipfile.ZipFile(TESTFN2, mode="w") as zipfp:
zipfp.writestr("somefile.txt", "bogus")
with zipfile.ZipFile(TESTFN2, mode="r") as zipfp:
self.assertRaises(RuntimeError, zipfp.write, TESTFN)
def test_add_file_before_1980(self):
# Set atime and mtime to 1970-01-01
os.utime(TESTFN, (0, 0))
with zipfile.ZipFile(TESTFN2, "w") as zipfp:
self.assertRaises(ValueError, zipfp.write, TESTFN)
@requires_zlib
class DeflateTestsWithSourceFile(AbstractTestsWithSourceFile,
unittest.TestCase):
compression = zipfile.ZIP_DEFLATED
def test_per_file_compression(self):
"""Check that files within a Zip archive can have different
compression options."""
with zipfile.ZipFile(TESTFN2, "w") as zipfp:
zipfp.write(TESTFN, 'storeme', zipfile.ZIP_STORED)
zipfp.write(TESTFN, 'deflateme', zipfile.ZIP_DEFLATED)
sinfo = zipfp.getinfo('storeme')
dinfo = zipfp.getinfo('deflateme')
self.assertEqual(sinfo.compress_type, zipfile.ZIP_STORED)
self.assertEqual(dinfo.compress_type, zipfile.ZIP_DEFLATED)
@requires_bz2
class Bzip2TestsWithSourceFile(AbstractTestsWithSourceFile,
unittest.TestCase):
compression = zipfile.ZIP_BZIP2
@requires_lzma
class LzmaTestsWithSourceFile(AbstractTestsWithSourceFile,
unittest.TestCase):
compression = zipfile.ZIP_LZMA
class AbstractTestZip64InSmallFiles:
# These tests test the ZIP64 functionality without using large files,
# see test_zipfile64 for proper tests.
@classmethod
def setUpClass(cls):
line_gen = (bytes("Test of zipfile line %d." % i, "ascii")
for i in range(0, FIXEDTEST_SIZE))
cls.data = b'\n'.join(line_gen)
def setUp(self):
self._limit = zipfile.ZIP64_LIMIT
zipfile.ZIP64_LIMIT = 5
# Make a source file with some lines
with open(TESTFN, "wb") as fp:
fp.write(self.data)
def zip_test(self, f, compression):
# Create the ZIP archive
with zipfile.ZipFile(f, "w", compression, allowZip64=True) as zipfp:
zipfp.write(TESTFN, "another.name")
zipfp.write(TESTFN, TESTFN)
zipfp.writestr("strfile", self.data)
# Read the ZIP archive
with zipfile.ZipFile(f, "r", compression) as zipfp:
self.assertEqual(zipfp.read(TESTFN), self.data)
self.assertEqual(zipfp.read("another.name"), self.data)
self.assertEqual(zipfp.read("strfile"), self.data)
# Print the ZIP directory
fp = io.StringIO()
zipfp.printdir(fp)
directory = fp.getvalue()
lines = directory.splitlines()
self.assertEqual(len(lines), 4) # Number of files + header
self.assertIn('File Name', lines[0])
self.assertIn('Modified', lines[0])
self.assertIn('Size', lines[0])
fn, date, time_, size = lines[1].split()
self.assertEqual(fn, 'another.name')
self.assertTrue(time.strptime(date, '%Y-%m-%d'))
self.assertTrue(time.strptime(time_, '%H:%M:%S'))
self.assertEqual(size, str(len(self.data)))
# Check the namelist
names = zipfp.namelist()
self.assertEqual(len(names), 3)
self.assertIn(TESTFN, names)
self.assertIn("another.name", names)
self.assertIn("strfile", names)
# Check infolist
infos = zipfp.infolist()
names = [i.filename for i in infos]
self.assertEqual(len(names), 3)
self.assertIn(TESTFN, names)
self.assertIn("another.name", names)
self.assertIn("strfile", names)
for i in infos:
self.assertEqual(i.file_size, len(self.data))
# check getinfo
for nm in (TESTFN, "another.name", "strfile"):
info = zipfp.getinfo(nm)
self.assertEqual(info.filename, nm)
self.assertEqual(info.file_size, len(self.data))
# Check that testzip doesn't raise an exception
zipfp.testzip()
def test_basic(self):
for f in get_files(self):
self.zip_test(f, self.compression)
def tearDown(self):
zipfile.ZIP64_LIMIT = self._limit
unlink(TESTFN)
unlink(TESTFN2)
class StoredTestZip64InSmallFiles(AbstractTestZip64InSmallFiles,
unittest.TestCase):
compression = zipfile.ZIP_STORED
def large_file_exception_test(self, f, compression):
with zipfile.ZipFile(f, "w", compression, allowZip64=False) as zipfp:
self.assertRaises(zipfile.LargeZipFile,
zipfp.write, TESTFN, "another.name")
def large_file_exception_test2(self, f, compression):
with zipfile.ZipFile(f, "w", compression, allowZip64=False) as zipfp:
self.assertRaises(zipfile.LargeZipFile,
zipfp.writestr, "another.name", self.data)
def test_large_file_exception(self):
for f in get_files(self):
self.large_file_exception_test(f, zipfile.ZIP_STORED)
self.large_file_exception_test2(f, zipfile.ZIP_STORED)
def test_absolute_arcnames(self):
with zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED,
allowZip64=True) as zipfp:
zipfp.write(TESTFN, "/absolute")
with zipfile.ZipFile(TESTFN2, "r", zipfile.ZIP_STORED) as zipfp:
self.assertEqual(zipfp.namelist(), ["absolute"])
@requires_zlib
class DeflateTestZip64InSmallFiles(AbstractTestZip64InSmallFiles,
unittest.TestCase):
compression = zipfile.ZIP_DEFLATED
@requires_bz2
class Bzip2TestZip64InSmallFiles(AbstractTestZip64InSmallFiles,
unittest.TestCase):
compression = zipfile.ZIP_BZIP2
@requires_lzma
class LzmaTestZip64InSmallFiles(AbstractTestZip64InSmallFiles,
unittest.TestCase):
compression = zipfile.ZIP_LZMA
class PyZipFileTests(unittest.TestCase):
def assertCompiledIn(self, name, namelist):
if name + 'o' not in namelist:
self.assertIn(name + 'c', namelist)
def test_write_pyfile(self):
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
fn = __file__
if fn.endswith('.pyc') or fn.endswith('.pyo'):
path_split = fn.split(os.sep)
if os.altsep is not None:
path_split.extend(fn.split(os.altsep))
if '__pycache__' in path_split:
fn = importlib.util.source_from_cache(fn)
else:
fn = fn[:-1]
zipfp.writepy(fn)
bn = os.path.basename(fn)
self.assertNotIn(bn, zipfp.namelist())
self.assertCompiledIn(bn, zipfp.namelist())
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
fn = __file__
if fn.endswith(('.pyc', '.pyo')):
fn = fn[:-1]
zipfp.writepy(fn, "testpackage")
bn = "%s/%s" % ("testpackage", os.path.basename(fn))
self.assertNotIn(bn, zipfp.namelist())
self.assertCompiledIn(bn, zipfp.namelist())
def test_write_python_package(self):
import email
packagedir = os.path.dirname(email.__file__)
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
zipfp.writepy(packagedir)
# Check for a couple of modules at different levels of the
# hierarchy
names = zipfp.namelist()
self.assertCompiledIn('email/__init__.py', names)
self.assertCompiledIn('email/mime/text.py', names)
def test_write_filtered_python_package(self):
import test
packagedir = os.path.dirname(test.__file__)
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
# first make sure that the test folder gives error messages
# (on the badsyntax_... files)
with captured_stdout() as reportSIO:
zipfp.writepy(packagedir)
reportStr = reportSIO.getvalue()
self.assertTrue('SyntaxError' in reportStr)
# then check that the filter works on the whole package
with captured_stdout() as reportSIO:
zipfp.writepy(packagedir, filterfunc=lambda whatever: False)
reportStr = reportSIO.getvalue()
self.assertTrue('SyntaxError' not in reportStr)
# then check that the filter works on individual files
with captured_stdout() as reportSIO, self.assertWarns(UserWarning):
zipfp.writepy(packagedir, filterfunc=lambda fn:
'bad' not in fn)
reportStr = reportSIO.getvalue()
if reportStr:
print(reportStr)
self.assertTrue('SyntaxError' not in reportStr)
def test_write_with_optimization(self):
import email
packagedir = os.path.dirname(email.__file__)
# use .pyc if running test in optimization mode,
# use .pyo if running test in debug mode
optlevel = 1 if __debug__ else 0
ext = '.pyo' if optlevel == 1 else '.pyc'
with TemporaryFile() as t, \
zipfile.PyZipFile(t, "w", optimize=optlevel) as zipfp:
zipfp.writepy(packagedir)
names = zipfp.namelist()
self.assertIn('email/__init__' + ext, names)
self.assertIn('email/mime/text' + ext, names)
def test_write_python_directory(self):
os.mkdir(TESTFN2)
try:
with open(os.path.join(TESTFN2, "mod1.py"), "w") as fp:
fp.write("print(42)\n")
with open(os.path.join(TESTFN2, "mod2.py"), "w") as fp:
fp.write("print(42 * 42)\n")
with open(os.path.join(TESTFN2, "mod2.txt"), "w") as fp:
fp.write("bla bla bla\n")
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
zipfp.writepy(TESTFN2)
names = zipfp.namelist()
self.assertCompiledIn('mod1.py', names)
self.assertCompiledIn('mod2.py', names)
self.assertNotIn('mod2.txt', names)
finally:
rmtree(TESTFN2)
def test_write_python_directory_filtered(self):
os.mkdir(TESTFN2)
try:
with open(os.path.join(TESTFN2, "mod1.py"), "w") as fp:
fp.write("print(42)\n")
with open(os.path.join(TESTFN2, "mod2.py"), "w") as fp:
fp.write("print(42 * 42)\n")
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
zipfp.writepy(TESTFN2, filterfunc=lambda fn:
not fn.endswith('mod2.py'))
names = zipfp.namelist()
self.assertCompiledIn('mod1.py', names)
self.assertNotIn('mod2.py', names)
finally:
rmtree(TESTFN2)
def test_write_non_pyfile(self):
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
with open(TESTFN, 'w') as f:
f.write('most definitely not a python file')
self.assertRaises(RuntimeError, zipfp.writepy, TESTFN)
unlink(TESTFN)
def test_write_pyfile_bad_syntax(self):
os.mkdir(TESTFN2)
try:
with open(os.path.join(TESTFN2, "mod1.py"), "w") as fp:
fp.write("Bad syntax in python file\n")
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
# syntax errors are printed to stdout
with captured_stdout() as s:
zipfp.writepy(os.path.join(TESTFN2, "mod1.py"))
self.assertIn("SyntaxError", s.getvalue())
                # since the python file could not be compiled, the .py file
                # is included rather than .pyc or .pyo
names = zipfp.namelist()
self.assertIn('mod1.py', names)
self.assertNotIn('mod1.pyc', names)
self.assertNotIn('mod1.pyo', names)
finally:
rmtree(TESTFN2)
class ExtractTests(unittest.TestCase):
def test_extract(self):
with zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED) as zipfp:
for fpath, fdata in SMALL_TEST_DATA:
zipfp.writestr(fpath, fdata)
with zipfile.ZipFile(TESTFN2, "r") as zipfp:
for fpath, fdata in SMALL_TEST_DATA:
writtenfile = zipfp.extract(fpath)
# make sure it was written to the right place
correctfile = os.path.join(os.getcwd(), fpath)
correctfile = os.path.normpath(correctfile)
self.assertEqual(writtenfile, correctfile)
# make sure correct data is in correct file
with open(writtenfile, "rb") as f:
self.assertEqual(fdata.encode(), f.read())
unlink(writtenfile)
# remove the test file subdirectories
rmtree(os.path.join(os.getcwd(), 'ziptest2dir'))
def test_extract_all(self):
with zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED) as zipfp:
for fpath, fdata in SMALL_TEST_DATA:
zipfp.writestr(fpath, fdata)
with zipfile.ZipFile(TESTFN2, "r") as zipfp:
zipfp.extractall()
for fpath, fdata in SMALL_TEST_DATA:
outfile = os.path.join(os.getcwd(), fpath)
with open(outfile, "rb") as f:
self.assertEqual(fdata.encode(), f.read())
unlink(outfile)
# remove the test file subdirectories
rmtree(os.path.join(os.getcwd(), 'ziptest2dir'))
def check_file(self, filename, content):
self.assertTrue(os.path.isfile(filename))
with open(filename, 'rb') as f:
self.assertEqual(f.read(), content)
def test_sanitize_windows_name(self):
san = zipfile.ZipFile._sanitize_windows_name
# Passing pathsep in allows this test to work regardless of platform.
self.assertEqual(san(r',,?,C:,foo,bar/z', ','), r'_,C_,foo,bar/z')
self.assertEqual(san(r'a\b,c<d>e|f"g?h*i', ','), r'a\b,c_d_e_f_g_h_i')
self.assertEqual(san('../../foo../../ba..r', '/'), r'foo/ba..r')
def test_extract_hackers_arcnames_common_cases(self):
common_hacknames = [
('../foo/bar', 'foo/bar'),
('foo/../bar', 'foo/bar'),
('foo/../../bar', 'foo/bar'),
('foo/bar/..', 'foo/bar'),
('./../foo/bar', 'foo/bar'),
('/foo/bar', 'foo/bar'),
('/foo/../bar', 'foo/bar'),
('/foo/../../bar', 'foo/bar'),
]
self._test_extract_hackers_arcnames(common_hacknames)
@unittest.skipIf(os.path.sep != '\\', 'Requires \\ as path separator.')
def test_extract_hackers_arcnames_windows_only(self):
"""Test combination of path fixing and windows name sanitization."""
windows_hacknames = [
(r'..\foo\bar', 'foo/bar'),
(r'..\/foo\/bar', 'foo/bar'),
(r'foo/\..\/bar', 'foo/bar'),
(r'foo\/../\bar', 'foo/bar'),
(r'C:foo/bar', 'foo/bar'),
(r'C:/foo/bar', 'foo/bar'),
(r'C://foo/bar', 'foo/bar'),
(r'C:\foo\bar', 'foo/bar'),
(r'//conky/mountpoint/foo/bar', 'foo/bar'),
(r'\\conky\mountpoint\foo\bar', 'foo/bar'),
(r'///conky/mountpoint/foo/bar', 'conky/mountpoint/foo/bar'),
(r'\\\conky\mountpoint\foo\bar', 'conky/mountpoint/foo/bar'),
(r'//conky//mountpoint/foo/bar', 'conky/mountpoint/foo/bar'),
(r'\\conky\\mountpoint\foo\bar', 'conky/mountpoint/foo/bar'),
(r'//?/C:/foo/bar', 'foo/bar'),
(r'\\?\C:\foo\bar', 'foo/bar'),
(r'C:/../C:/foo/bar', 'C_/foo/bar'),
(r'a:b\c<d>e|f"g?h*i', 'b/c_d_e_f_g_h_i'),
('../../foo../../ba..r', 'foo/ba..r'),
]
self._test_extract_hackers_arcnames(windows_hacknames)
@unittest.skipIf(os.path.sep != '/', r'Requires / as path separator.')
def test_extract_hackers_arcnames_posix_only(self):
posix_hacknames = [
('//foo/bar', 'foo/bar'),
('../../foo../../ba..r', 'foo../ba..r'),
(r'foo/..\bar', r'foo/..\bar'),
]
self._test_extract_hackers_arcnames(posix_hacknames)
def _test_extract_hackers_arcnames(self, hacknames):
for arcname, fixedname in hacknames:
content = b'foobar' + arcname.encode()
with zipfile.ZipFile(TESTFN2, 'w', zipfile.ZIP_STORED) as zipfp:
zinfo = zipfile.ZipInfo()
# preserve backslashes
zinfo.filename = arcname
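                # store Unix permissions (0600) in the high 16 bits of
                # external_attr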
zinfo.external_attr = 0o600 << 16
zipfp.writestr(zinfo, content)
arcname = arcname.replace(os.sep, "/")
targetpath = os.path.join('target', 'subdir', 'subsub')
correctfile = os.path.join(targetpath, *fixedname.split('/'))
with zipfile.ZipFile(TESTFN2, 'r') as zipfp:
writtenfile = zipfp.extract(arcname, targetpath)
self.assertEqual(writtenfile, correctfile,
msg='extract %r: %r != %r' %
(arcname, writtenfile, correctfile))
self.check_file(correctfile, content)
rmtree('target')
with zipfile.ZipFile(TESTFN2, 'r') as zipfp:
zipfp.extractall(targetpath)
self.check_file(correctfile, content)
rmtree('target')
correctfile = os.path.join(os.getcwd(), *fixedname.split('/'))
with zipfile.ZipFile(TESTFN2, 'r') as zipfp:
writtenfile = zipfp.extract(arcname)
self.assertEqual(writtenfile, correctfile,
msg="extract %r" % arcname)
self.check_file(correctfile, content)
rmtree(fixedname.split('/')[0])
with zipfile.ZipFile(TESTFN2, 'r') as zipfp:
zipfp.extractall()
self.check_file(correctfile, content)
rmtree(fixedname.split('/')[0])
unlink(TESTFN2)
class OtherTests(unittest.TestCase):
def test_open_via_zip_info(self):
# Create the ZIP archive
with zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED) as zipfp:
zipfp.writestr("name", "foo")
with self.assertWarns(UserWarning):
zipfp.writestr("name", "bar")
self.assertEqual(zipfp.namelist(), ["name"] * 2)
with zipfile.ZipFile(TESTFN2, "r") as zipfp:
infos = zipfp.infolist()
data = b""
for info in infos:
with zipfp.open(info) as zipopen:
data += zipopen.read()
self.assertIn(data, {b"foobar", b"barfoo"})
data = b""
for info in infos:
data += zipfp.read(info)
self.assertIn(data, {b"foobar", b"barfoo"})
def test_universal_deprecation(self):
f = io.BytesIO()
with zipfile.ZipFile(f, "w") as zipfp:
zipfp.writestr('spam.txt', b'ababagalamaga')
with zipfile.ZipFile(f, "r") as zipfp:
for mode in 'U', 'rU':
with self.assertWarns(DeprecationWarning):
zipopen = zipfp.open('spam.txt', mode)
zipopen.close()
def test_universal_readaheads(self):
f = io.BytesIO()
data = b'a\r\n' * 16 * 1024
with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as zipfp:
zipfp.writestr(TESTFN, data)
data2 = b''
with zipfile.ZipFile(f, 'r') as zipfp, \
openU(zipfp, TESTFN) as zipopen:
for line in zipopen:
data2 += line
self.assertEqual(data, data2.replace(b'\n', b'\r\n'))
def test_writestr_extended_local_header_issue1202(self):
with zipfile.ZipFile(TESTFN2, 'w') as orig_zip:
for data in 'abcdefghijklmnop':
zinfo = zipfile.ZipInfo(data)
zinfo.flag_bits |= 0x08 # Include an extended local header.
orig_zip.writestr(zinfo, data)
def test_close(self):
"""Check that the zipfile is closed after the 'with' block."""
with zipfile.ZipFile(TESTFN2, "w") as zipfp:
for fpath, fdata in SMALL_TEST_DATA:
zipfp.writestr(fpath, fdata)
self.assertIsNotNone(zipfp.fp, 'zipfp is not open')
self.assertIsNone(zipfp.fp, 'zipfp is not closed')
with zipfile.ZipFile(TESTFN2, "r") as zipfp:
self.assertIsNotNone(zipfp.fp, 'zipfp is not open')
self.assertIsNone(zipfp.fp, 'zipfp is not closed')
def test_close_on_exception(self):
"""Check that the zipfile is closed if an exception is raised in the
'with' block."""
with zipfile.ZipFile(TESTFN2, "w") as zipfp:
for fpath, fdata in SMALL_TEST_DATA:
zipfp.writestr(fpath, fdata)
try:
with zipfile.ZipFile(TESTFN2, "r") as zipfp2:
raise zipfile.BadZipFile()
except zipfile.BadZipFile:
self.assertIsNone(zipfp2.fp, 'zipfp is not closed')
def test_unsupported_version(self):
# File has an extract_version of 120
data = (b'PK\x03\x04x\x00\x00\x00\x00\x00!p\xa1@\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00xPK\x01\x02x\x03x\x00\x00\x00\x00'
b'\x00!p\xa1@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x01\x00\x00\x00\x00xPK\x05\x06'
b'\x00\x00\x00\x00\x01\x00\x01\x00/\x00\x00\x00\x1f\x00\x00\x00\x00\x00')
self.assertRaises(NotImplementedError, zipfile.ZipFile,
io.BytesIO(data), 'r')
@requires_zlib
def test_read_unicode_filenames(self):
# bug #10801
fname = findfile('zip_cp437_header.zip')
with zipfile.ZipFile(fname) as zipfp:
for name in zipfp.namelist():
zipfp.open(name).close()
def test_write_unicode_filenames(self):
with zipfile.ZipFile(TESTFN, "w") as zf:
zf.writestr("foo.txt", "Test for unicode filename")
zf.writestr("\xf6.txt", "Test for unicode filename")
self.assertIsInstance(zf.infolist()[0].filename, str)
with zipfile.ZipFile(TESTFN, "r") as zf:
self.assertEqual(zf.filelist[0].filename, "foo.txt")
self.assertEqual(zf.filelist[1].filename, "\xf6.txt")
def test_create_non_existent_file_for_append(self):
if os.path.exists(TESTFN):
os.unlink(TESTFN)
filename = 'testfile.txt'
content = b'hello, world. this is some content.'
try:
with zipfile.ZipFile(TESTFN, 'a') as zf:
zf.writestr(filename, content)
except OSError:
self.fail('Could not append data to a non-existent zip file.')
self.assertTrue(os.path.exists(TESTFN))
with zipfile.ZipFile(TESTFN, 'r') as zf:
self.assertEqual(zf.read(filename), content)
def test_close_erroneous_file(self):
# This test checks that the ZipFile constructor closes the file object
# it opens if there's an error in the file. If it doesn't, the
# traceback holds a reference to the ZipFile object and, indirectly,
# the file object.
# On Windows, this causes the os.unlink() call to fail because the
# underlying file is still open. This is SF bug #412214.
#
with open(TESTFN, "w") as fp:
fp.write("this is not a legal zip file\n")
try:
zf = zipfile.ZipFile(TESTFN)
except zipfile.BadZipFile:
pass
def test_is_zip_erroneous_file(self):
"""Check that is_zipfile() correctly identifies non-zip files."""
# - passing a filename
with open(TESTFN, "w") as fp:
fp.write("this is not a legal zip file\n")
self.assertFalse(zipfile.is_zipfile(TESTFN))
# - passing a file object
with open(TESTFN, "rb") as fp:
self.assertFalse(zipfile.is_zipfile(fp))
# - passing a file-like object
fp = io.BytesIO()
fp.write(b"this is not a legal zip file\n")
self.assertFalse(zipfile.is_zipfile(fp))
fp.seek(0, 0)
self.assertFalse(zipfile.is_zipfile(fp))
def test_damaged_zipfile(self):
"""Check that zipfiles with missing bytes at the end raise BadZipFile."""
# - Create a valid zip file
fp = io.BytesIO()
with zipfile.ZipFile(fp, mode="w") as zipf:
zipf.writestr("foo.txt", b"O, for a Muse of Fire!")
zipfiledata = fp.getvalue()
# - Now create copies of it missing the last N bytes and make sure
# a BadZipFile exception is raised when we try to open it
for N in range(len(zipfiledata)):
fp = io.BytesIO(zipfiledata[:N])
self.assertRaises(zipfile.BadZipFile, zipfile.ZipFile, fp)
def test_is_zip_valid_file(self):
"""Check that is_zipfile() correctly identifies zip files."""
# - passing a filename
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
zipf.writestr("foo.txt", b"O, for a Muse of Fire!")
self.assertTrue(zipfile.is_zipfile(TESTFN))
# - passing a file object
with open(TESTFN, "rb") as fp:
self.assertTrue(zipfile.is_zipfile(fp))
fp.seek(0, 0)
zip_contents = fp.read()
# - passing a file-like object
fp = io.BytesIO()
fp.write(zip_contents)
self.assertTrue(zipfile.is_zipfile(fp))
fp.seek(0, 0)
self.assertTrue(zipfile.is_zipfile(fp))
def test_non_existent_file_raises_OSError(self):
# make sure we don't raise an AttributeError when a partially-constructed
# ZipFile instance is finalized; this tests for regression on SF tracker
# bug #403871.
# The bug we're testing for caused an AttributeError to be raised
# when a ZipFile instance was created for a file that did not
# exist; the .fp member was not initialized but was needed by the
# __del__() method. Since the AttributeError is in the __del__(),
# it is ignored, but the user should be sufficiently annoyed by
# the message on the output that regression will be noticed
# quickly.
self.assertRaises(OSError, zipfile.ZipFile, TESTFN)
def test_empty_file_raises_BadZipFile(self):
f = open(TESTFN, 'w')
f.close()
self.assertRaises(zipfile.BadZipFile, zipfile.ZipFile, TESTFN)
with open(TESTFN, 'w') as fp:
fp.write("short file")
self.assertRaises(zipfile.BadZipFile, zipfile.ZipFile, TESTFN)
def test_closed_zip_raises_RuntimeError(self):
"""Verify that testzip() doesn't swallow inappropriate exceptions."""
data = io.BytesIO()
with zipfile.ZipFile(data, mode="w") as zipf:
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
# This is correct; calling .read on a closed ZipFile should raise
# a RuntimeError, and so should calling .testzip. An earlier
# version of .testzip would swallow this exception (and any other)
# and report that the first file in the archive was corrupt.
self.assertRaises(RuntimeError, zipf.read, "foo.txt")
self.assertRaises(RuntimeError, zipf.open, "foo.txt")
self.assertRaises(RuntimeError, zipf.testzip)
self.assertRaises(RuntimeError, zipf.writestr, "bogus.txt", "bogus")
with open(TESTFN, 'w') as f:
f.write('zipfile test data')
self.assertRaises(RuntimeError, zipf.write, TESTFN)
def test_bad_constructor_mode(self):
"""Check that bad modes passed to ZipFile constructor are caught."""
self.assertRaises(RuntimeError, zipfile.ZipFile, TESTFN, "q")
def test_bad_open_mode(self):
"""Check that bad modes passed to ZipFile.open are caught."""
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
with zipfile.ZipFile(TESTFN, mode="r") as zipf:
# read the data to make sure the file is there
zipf.read("foo.txt")
self.assertRaises(RuntimeError, zipf.open, "foo.txt", "q")
def test_read0(self):
"""Check that calling read(0) on a ZipExtFile object returns an empty
string and doesn't advance file pointer."""
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
# read the data to make sure the file is there
with zipf.open("foo.txt") as f:
for i in range(FIXEDTEST_SIZE):
self.assertEqual(f.read(0), b'')
self.assertEqual(f.read(), b"O, for a Muse of Fire!")
def test_open_non_existent_item(self):
"""Check that attempting to call open() for an item that doesn't
exist in the archive raises a RuntimeError."""
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
self.assertRaises(KeyError, zipf.open, "foo.txt", "r")
def test_bad_compression_mode(self):
"""Check that bad compression methods passed to ZipFile.open are
caught."""
self.assertRaises(RuntimeError, zipfile.ZipFile, TESTFN, "w", -1)
def test_unsupported_compression(self):
# data is declared as shrunk, but actually deflated
data = (b'PK\x03\x04.\x00\x00\x00\x01\x00\xe4C\xa1@\x00\x00\x00'
b'\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00x\x03\x00PK\x01'
b'\x02.\x03.\x00\x00\x00\x01\x00\xe4C\xa1@\x00\x00\x00\x00\x02\x00\x00'
b'\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x80\x01\x00\x00\x00\x00xPK\x05\x06\x00\x00\x00\x00\x01\x00\x01\x00'
b'/\x00\x00\x00!\x00\x00\x00\x00\x00')
with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf:
self.assertRaises(NotImplementedError, zipf.open, 'x')
def test_null_byte_in_filename(self):
"""Check that a filename containing a null byte is properly
terminated."""
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
zipf.writestr("foo.txt\x00qqq", b"O, for a Muse of Fire!")
self.assertEqual(zipf.namelist(), ['foo.txt'])
def test_struct_sizes(self):
"""Check that ZIP internal structure sizes are calculated correctly."""
self.assertEqual(zipfile.sizeEndCentDir, 22)
self.assertEqual(zipfile.sizeCentralDir, 46)
self.assertEqual(zipfile.sizeEndCentDir64, 56)
self.assertEqual(zipfile.sizeEndCentDir64Locator, 20)
def test_comments(self):
"""Check that comments on the archive are handled properly."""
# check default comment is empty
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
self.assertEqual(zipf.comment, b'')
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
with zipfile.ZipFile(TESTFN, mode="r") as zipfr:
self.assertEqual(zipfr.comment, b'')
# check a simple short comment
comment = b'Bravely taking to his feet, he beat a very brave retreat.'
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
zipf.comment = comment
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
with zipfile.ZipFile(TESTFN, mode="r") as zipfr:
self.assertEqual(zipf.comment, comment)
# check a comment of max length
comment2 = ''.join(['%d' % (i**3 % 10) for i in range((1 << 16)-1)])
comment2 = comment2.encode("ascii")
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
zipf.comment = comment2
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
with zipfile.ZipFile(TESTFN, mode="r") as zipfr:
self.assertEqual(zipfr.comment, comment2)
# check a comment that is too long is truncated
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
with self.assertWarns(UserWarning):
zipf.comment = comment2 + b'oops'
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
with zipfile.ZipFile(TESTFN, mode="r") as zipfr:
self.assertEqual(zipfr.comment, comment2)
# check that comments are correctly modified in append mode
with zipfile.ZipFile(TESTFN,mode="w") as zipf:
zipf.comment = b"original comment"
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
with zipfile.ZipFile(TESTFN,mode="a") as zipf:
zipf.comment = b"an updated comment"
with zipfile.ZipFile(TESTFN,mode="r") as zipf:
self.assertEqual(zipf.comment, b"an updated comment")
# check that comments are correctly shortened in append mode
with zipfile.ZipFile(TESTFN,mode="w") as zipf:
zipf.comment = b"original comment that's longer"
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
with zipfile.ZipFile(TESTFN,mode="a") as zipf:
zipf.comment = b"shorter comment"
with zipfile.ZipFile(TESTFN,mode="r") as zipf:
self.assertEqual(zipf.comment, b"shorter comment")
def test_unicode_comment(self):
with zipfile.ZipFile(TESTFN, "w", zipfile.ZIP_STORED) as zipf:
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
with self.assertRaises(TypeError):
zipf.comment = "this is an error"
def test_change_comment_in_empty_archive(self):
with zipfile.ZipFile(TESTFN, "a", zipfile.ZIP_STORED) as zipf:
self.assertFalse(zipf.filelist)
zipf.comment = b"this is a comment"
with zipfile.ZipFile(TESTFN, "r") as zipf:
self.assertEqual(zipf.comment, b"this is a comment")
def test_change_comment_in_nonempty_archive(self):
with zipfile.ZipFile(TESTFN, "w", zipfile.ZIP_STORED) as zipf:
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
with zipfile.ZipFile(TESTFN, "a", zipfile.ZIP_STORED) as zipf:
self.assertTrue(zipf.filelist)
zipf.comment = b"this is a comment"
with zipfile.ZipFile(TESTFN, "r") as zipf:
self.assertEqual(zipf.comment, b"this is a comment")
def test_empty_zipfile(self):
# Check that creating a file in 'w' or 'a' mode and closing without
# adding any files to the archives creates a valid empty ZIP file
zipf = zipfile.ZipFile(TESTFN, mode="w")
zipf.close()
try:
zipf = zipfile.ZipFile(TESTFN, mode="r")
except zipfile.BadZipFile:
self.fail("Unable to create empty ZIP file in 'w' mode")
zipf = zipfile.ZipFile(TESTFN, mode="a")
zipf.close()
try:
zipf = zipfile.ZipFile(TESTFN, mode="r")
except:
self.fail("Unable to create empty ZIP file in 'a' mode")
def test_open_empty_file(self):
# Issue 1710703: Check that opening a file with less than 22 bytes
# raises a BadZipFile exception (rather than the previously unhelpful
# OSError)
f = open(TESTFN, 'w')
f.close()
self.assertRaises(zipfile.BadZipFile, zipfile.ZipFile, TESTFN, 'r')
def test_create_zipinfo_before_1980(self):
self.assertRaises(ValueError,
zipfile.ZipInfo, 'seventies', (1979, 1, 1, 0, 0, 0))
def test_zipfile_with_short_extra_field(self):
"""If an extra field in the header is less than 4 bytes, skip it."""
zipdata = (
b'PK\x03\x04\x14\x00\x00\x00\x00\x00\x93\x9b\xad@\x8b\x9e'
b'\xd9\xd3\x01\x00\x00\x00\x01\x00\x00\x00\x03\x00\x03\x00ab'
b'c\x00\x00\x00APK\x01\x02\x14\x03\x14\x00\x00\x00\x00'
b'\x00\x93\x9b\xad@\x8b\x9e\xd9\xd3\x01\x00\x00\x00\x01\x00\x00'
b'\x00\x03\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\x00'
b'\x00\x00\x00abc\x00\x00PK\x05\x06\x00\x00\x00\x00'
b'\x01\x00\x01\x003\x00\x00\x00%\x00\x00\x00\x00\x00'
)
with zipfile.ZipFile(io.BytesIO(zipdata), 'r') as zipf:
# testzip returns the name of the first corrupt file, or None
self.assertIsNone(zipf.testzip())
def tearDown(self):
unlink(TESTFN)
unlink(TESTFN2)
class AbstractBadCrcTests:
def test_testzip_with_bad_crc(self):
"""Tests that files with bad CRCs return their name from testzip."""
zipdata = self.zip_with_bad_crc
with zipfile.ZipFile(io.BytesIO(zipdata), mode="r") as zipf:
# testzip returns the name of the first corrupt file, or None
self.assertEqual('afile', zipf.testzip())
def test_read_with_bad_crc(self):
"""Tests that files with bad CRCs raise a BadZipFile exception when read."""
zipdata = self.zip_with_bad_crc
# Using ZipFile.read()
with zipfile.ZipFile(io.BytesIO(zipdata), mode="r") as zipf:
self.assertRaises(zipfile.BadZipFile, zipf.read, 'afile')
# Using ZipExtFile.read()
with zipfile.ZipFile(io.BytesIO(zipdata), mode="r") as zipf:
with zipf.open('afile', 'r') as corrupt_file:
self.assertRaises(zipfile.BadZipFile, corrupt_file.read)
# Same with small reads (in order to exercise the buffering logic)
with zipfile.ZipFile(io.BytesIO(zipdata), mode="r") as zipf:
with zipf.open('afile', 'r') as corrupt_file:
corrupt_file.MIN_READ_SIZE = 2
with self.assertRaises(zipfile.BadZipFile):
while corrupt_file.read(2):
pass
class StoredBadCrcTests(AbstractBadCrcTests, unittest.TestCase):
compression = zipfile.ZIP_STORED
zip_with_bad_crc = (
b'PK\003\004\024\0\0\0\0\0 \213\212;:r'
b'\253\377\f\0\0\0\f\0\0\0\005\0\0\000af'
b'ilehello,AworldP'
b'K\001\002\024\003\024\0\0\0\0\0 \213\212;:'
b'r\253\377\f\0\0\0\f\0\0\0\005\0\0\0\0'
b'\0\0\0\0\0\0\0\200\001\0\0\0\000afi'
b'lePK\005\006\0\0\0\0\001\0\001\0003\000'
b'\0\0/\0\0\0\0\0')
@requires_zlib
class DeflateBadCrcTests(AbstractBadCrcTests, unittest.TestCase):
compression = zipfile.ZIP_DEFLATED
zip_with_bad_crc = (
b'PK\x03\x04\x14\x00\x00\x00\x08\x00n}\x0c=FA'
b'KE\x10\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00af'
b'ile\xcbH\xcd\xc9\xc9W(\xcf/\xcaI\xc9\xa0'
b'=\x13\x00PK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00n'
b'}\x0c=FAKE\x10\x00\x00\x00n\x00\x00\x00\x05'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x01\x00\x00\x00'
b'\x00afilePK\x05\x06\x00\x00\x00\x00\x01\x00'
b'\x01\x003\x00\x00\x003\x00\x00\x00\x00\x00')
@requires_bz2
class Bzip2BadCrcTests(AbstractBadCrcTests, unittest.TestCase):
compression = zipfile.ZIP_BZIP2
zip_with_bad_crc = (
b'PK\x03\x04\x14\x03\x00\x00\x0c\x00nu\x0c=FA'
b'KE8\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00af'
b'ileBZh91AY&SY\xd4\xa8\xca'
b'\x7f\x00\x00\x0f\x11\x80@\x00\x06D\x90\x80 \x00 \xa5'
b'P\xd9!\x03\x03\x13\x13\x13\x89\xa9\xa9\xc2u5:\x9f'
b'\x8b\xb9"\x9c(HjTe?\x80PK\x01\x02\x14'
b'\x03\x14\x03\x00\x00\x0c\x00nu\x0c=FAKE8'
b'\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00 \x80\x80\x81\x00\x00\x00\x00afilePK'
b'\x05\x06\x00\x00\x00\x00\x01\x00\x01\x003\x00\x00\x00[\x00'
b'\x00\x00\x00\x00')
@requires_lzma
class LzmaBadCrcTests(AbstractBadCrcTests, unittest.TestCase):
compression = zipfile.ZIP_LZMA
zip_with_bad_crc = (
b'PK\x03\x04\x14\x03\x00\x00\x0e\x00nu\x0c=FA'
b'KE\x1b\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00af'
b'ile\t\x04\x05\x00]\x00\x00\x00\x04\x004\x19I'
b'\xee\x8d\xe9\x17\x89:3`\tq!.8\x00PK'
b'\x01\x02\x14\x03\x14\x03\x00\x00\x0e\x00nu\x0c=FA'
b'KE\x1b\x00\x00\x00n\x00\x00\x00\x05\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00 \x80\x80\x81\x00\x00\x00\x00afil'
b'ePK\x05\x06\x00\x00\x00\x00\x01\x00\x01\x003\x00\x00'
b'\x00>\x00\x00\x00\x00\x00')
class DecryptionTests(unittest.TestCase):
"""Check that ZIP decryption works. Since the library does not
support encryption at the moment, we use a pre-generated encrypted
ZIP file."""
data = (
b'PK\x03\x04\x14\x00\x01\x00\x00\x00n\x92i.#y\xef?&\x00\x00\x00\x1a\x00'
b'\x00\x00\x08\x00\x00\x00test.txt\xfa\x10\xa0gly|\xfa-\xc5\xc0=\xf9y'
b'\x18\xe0\xa8r\xb3Z}Lg\xbc\xae\xf9|\x9b\x19\xe4\x8b\xba\xbb)\x8c\xb0\xdbl'
b'PK\x01\x02\x14\x00\x14\x00\x01\x00\x00\x00n\x92i.#y\xef?&\x00\x00\x00'
b'\x1a\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x01\x00 \x00\xb6\x81'
b'\x00\x00\x00\x00test.txtPK\x05\x06\x00\x00\x00\x00\x01\x00\x01\x006\x00'
b'\x00\x00L\x00\x00\x00\x00\x00' )
data2 = (
b'PK\x03\x04\x14\x00\t\x00\x08\x00\xcf}38xu\xaa\xb2\x14\x00\x00\x00\x00\x02'
b'\x00\x00\x04\x00\x15\x00zeroUT\t\x00\x03\xd6\x8b\x92G\xda\x8b\x92GUx\x04'
b'\x00\xe8\x03\xe8\x03\xc7<M\xb5a\xceX\xa3Y&\x8b{oE\xd7\x9d\x8c\x98\x02\xc0'
b'PK\x07\x08xu\xaa\xb2\x14\x00\x00\x00\x00\x02\x00\x00PK\x01\x02\x17\x03'
b'\x14\x00\t\x00\x08\x00\xcf}38xu\xaa\xb2\x14\x00\x00\x00\x00\x02\x00\x00'
b'\x04\x00\r\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\x00\x00\x00\x00ze'
b'roUT\x05\x00\x03\xd6\x8b\x92GUx\x00\x00PK\x05\x06\x00\x00\x00\x00\x01'
b'\x00\x01\x00?\x00\x00\x00[\x00\x00\x00\x00\x00' )
plain = b'zipfile.py encryption test'
plain2 = b'\x00'*512
def setUp(self):
with open(TESTFN, "wb") as fp:
fp.write(self.data)
self.zip = zipfile.ZipFile(TESTFN, "r")
with open(TESTFN2, "wb") as fp:
fp.write(self.data2)
self.zip2 = zipfile.ZipFile(TESTFN2, "r")
def tearDown(self):
self.zip.close()
os.unlink(TESTFN)
self.zip2.close()
os.unlink(TESTFN2)
def test_no_password(self):
# Reading the encrypted file without password
        # must raise a RuntimeError exception
self.assertRaises(RuntimeError, self.zip.read, "test.txt")
self.assertRaises(RuntimeError, self.zip2.read, "zero")
def test_bad_password(self):
self.zip.setpassword(b"perl")
self.assertRaises(RuntimeError, self.zip.read, "test.txt")
self.zip2.setpassword(b"perl")
self.assertRaises(RuntimeError, self.zip2.read, "zero")
@requires_zlib
def test_good_password(self):
self.zip.setpassword(b"python")
self.assertEqual(self.zip.read("test.txt"), self.plain)
self.zip2.setpassword(b"12345")
self.assertEqual(self.zip2.read("zero"), self.plain2)
def test_unicode_password(self):
self.assertRaises(TypeError, self.zip.setpassword, "unicode")
self.assertRaises(TypeError, self.zip.read, "test.txt", "python")
self.assertRaises(TypeError, self.zip.open, "test.txt", pwd="python")
self.assertRaises(TypeError, self.zip.extract, "test.txt", pwd="python")
class AbstractTestsWithRandomBinaryFiles:
@classmethod
def setUpClass(cls):
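        # build a random-sized blob of packed little-endian floats to use as
        # binary test data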
datacount = randint(16, 64)*1024 + randint(1, 1024)
cls.data = b''.join(struct.pack('<f', random()*randint(-1000, 1000))
for i in range(datacount))
def setUp(self):
# Make a source file with some lines
with open(TESTFN, "wb") as fp:
fp.write(self.data)
def tearDown(self):
unlink(TESTFN)
unlink(TESTFN2)
def make_test_archive(self, f, compression):
# Create the ZIP archive
with zipfile.ZipFile(f, "w", compression) as zipfp:
zipfp.write(TESTFN, "another.name")
zipfp.write(TESTFN, TESTFN)
def zip_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r", compression) as zipfp:
testdata = zipfp.read(TESTFN)
self.assertEqual(len(testdata), len(self.data))
self.assertEqual(testdata, self.data)
self.assertEqual(zipfp.read("another.name"), self.data)
def test_read(self):
for f in get_files(self):
self.zip_test(f, self.compression)
def zip_open_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r", compression) as zipfp:
zipdata1 = []
with zipfp.open(TESTFN) as zipopen1:
while True:
read_data = zipopen1.read(256)
if not read_data:
break
zipdata1.append(read_data)
zipdata2 = []
with zipfp.open("another.name") as zipopen2:
while True:
read_data = zipopen2.read(256)
if not read_data:
break
zipdata2.append(read_data)
testdata1 = b''.join(zipdata1)
self.assertEqual(len(testdata1), len(self.data))
self.assertEqual(testdata1, self.data)
testdata2 = b''.join(zipdata2)
self.assertEqual(len(testdata2), len(self.data))
self.assertEqual(testdata2, self.data)
def test_open(self):
for f in get_files(self):
self.zip_open_test(f, self.compression)
def zip_random_open_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r", compression) as zipfp:
zipdata1 = []
with zipfp.open(TESTFN) as zipopen1:
while True:
read_data = zipopen1.read(randint(1, 1024))
if not read_data:
break
zipdata1.append(read_data)
testdata = b''.join(zipdata1)
self.assertEqual(len(testdata), len(self.data))
self.assertEqual(testdata, self.data)
def test_random_open(self):
for f in get_files(self):
self.zip_random_open_test(f, self.compression)
class StoredTestsWithRandomBinaryFiles(AbstractTestsWithRandomBinaryFiles,
unittest.TestCase):
compression = zipfile.ZIP_STORED
@requires_zlib
class DeflateTestsWithRandomBinaryFiles(AbstractTestsWithRandomBinaryFiles,
unittest.TestCase):
compression = zipfile.ZIP_DEFLATED
@requires_bz2
class Bzip2TestsWithRandomBinaryFiles(AbstractTestsWithRandomBinaryFiles,
unittest.TestCase):
compression = zipfile.ZIP_BZIP2
@requires_lzma
class LzmaTestsWithRandomBinaryFiles(AbstractTestsWithRandomBinaryFiles,
unittest.TestCase):
compression = zipfile.ZIP_LZMA
@requires_zlib
class TestsWithMultipleOpens(unittest.TestCase):
def setUp(self):
# Create the ZIP archive
with zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_DEFLATED) as zipfp:
zipfp.writestr('ones', '1'*FIXEDTEST_SIZE)
zipfp.writestr('twos', '2'*FIXEDTEST_SIZE)
def test_same_file(self):
# Verify that (when the ZipFile is in control of creating file objects)
# multiple open() calls can be made without interfering with each other.
with zipfile.ZipFile(TESTFN2, mode="r") as zipf:
with zipf.open('ones') as zopen1, zipf.open('ones') as zopen2:
data1 = zopen1.read(500)
data2 = zopen2.read(500)
data1 += zopen1.read(500)
data2 += zopen2.read(500)
self.assertEqual(data1, data2)
def test_different_file(self):
# Verify that (when the ZipFile is in control of creating file objects)
# multiple open() calls can be made without interfering with each other.
with zipfile.ZipFile(TESTFN2, mode="r") as zipf:
with zipf.open('ones') as zopen1, zipf.open('twos') as zopen2:
data1 = zopen1.read(500)
data2 = zopen2.read(500)
data1 += zopen1.read(500)
data2 += zopen2.read(500)
self.assertEqual(data1, b'1'*FIXEDTEST_SIZE)
self.assertEqual(data2, b'2'*FIXEDTEST_SIZE)
def test_interleaved(self):
# Verify that (when the ZipFile is in control of creating file objects)
# multiple open() calls can be made without interfering with each other.
with zipfile.ZipFile(TESTFN2, mode="r") as zipf:
with zipf.open('ones') as zopen1, zipf.open('twos') as zopen2:
data1 = zopen1.read(500)
data2 = zopen2.read(500)
data1 += zopen1.read(500)
data2 += zopen2.read(500)
self.assertEqual(data1, b'1'*FIXEDTEST_SIZE)
self.assertEqual(data2, b'2'*FIXEDTEST_SIZE)
def tearDown(self):
unlink(TESTFN2)
class TestWithDirectory(unittest.TestCase):
def setUp(self):
os.mkdir(TESTFN2)
def test_extract_dir(self):
with zipfile.ZipFile(findfile("zipdir.zip")) as zipf:
zipf.extractall(TESTFN2)
self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "a")))
self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "a", "b")))
self.assertTrue(os.path.exists(os.path.join(TESTFN2, "a", "b", "c")))
def test_bug_6050(self):
# Extraction should succeed if directories already exist
os.mkdir(os.path.join(TESTFN2, "a"))
self.test_extract_dir()
def test_store_dir(self):
os.mkdir(os.path.join(TESTFN2, "x"))
zipf = zipfile.ZipFile(TESTFN, "w")
zipf.write(os.path.join(TESTFN2, "x"), "x")
self.assertTrue(zipf.filelist[0].filename.endswith("x/"))
def tearDown(self):
rmtree(TESTFN2)
if os.path.exists(TESTFN):
unlink(TESTFN)
class AbstractUniversalNewlineTests:
@classmethod
def setUpClass(cls):
cls.line_gen = [bytes("Test of zipfile line %d." % i, "ascii")
for i in range(FIXEDTEST_SIZE)]
cls.seps = (b'\r', b'\r\n', b'\n')
cls.arcdata = {}
for n, s in enumerate(cls.seps):
cls.arcdata[s] = s.join(cls.line_gen) + s
def setUp(self):
self.arcfiles = {}
for n, s in enumerate(self.seps):
self.arcfiles[s] = '%s-%d' % (TESTFN, n)
with open(self.arcfiles[s], "wb") as f:
f.write(self.arcdata[s])
def make_test_archive(self, f, compression):
# Create the ZIP archive
with zipfile.ZipFile(f, "w", compression) as zipfp:
for fn in self.arcfiles.values():
zipfp.write(fn, fn)
def read_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
for sep, fn in self.arcfiles.items():
with openU(zipfp, fn) as fp:
zipdata = fp.read()
self.assertEqual(self.arcdata[sep], zipdata)
def test_read(self):
for f in get_files(self):
self.read_test(f, self.compression)
def readline_read_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
for sep, fn in self.arcfiles.items():
with openU(zipfp, fn) as zipopen:
data = b''
while True:
read = zipopen.readline()
if not read:
break
data += read
read = zipopen.read(5)
if not read:
break
data += read
self.assertEqual(data, self.arcdata[b'\n'])
def test_readline_read(self):
for f in get_files(self):
self.readline_read_test(f, self.compression)
def readline_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
for sep, fn in self.arcfiles.items():
with openU(zipfp, fn) as zipopen:
for line in self.line_gen:
linedata = zipopen.readline()
self.assertEqual(linedata, line + b'\n')
def test_readline(self):
for f in get_files(self):
self.readline_test(f, self.compression)
def readlines_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
for sep, fn in self.arcfiles.items():
with openU(zipfp, fn) as fp:
ziplines = fp.readlines()
for line, zipline in zip(self.line_gen, ziplines):
self.assertEqual(zipline, line + b'\n')
def test_readlines(self):
for f in get_files(self):
self.readlines_test(f, self.compression)
def iterlines_test(self, f, compression):
self.make_test_archive(f, compression)
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
for sep, fn in self.arcfiles.items():
with openU(zipfp, fn) as fp:
for line, zipline in zip(self.line_gen, fp):
self.assertEqual(zipline, line + b'\n')
def test_iterlines(self):
for f in get_files(self):
self.iterlines_test(f, self.compression)
def tearDown(self):
for sep, fn in self.arcfiles.items():
unlink(fn)
unlink(TESTFN)
unlink(TESTFN2)
class StoredUniversalNewlineTests(AbstractUniversalNewlineTests,
unittest.TestCase):
compression = zipfile.ZIP_STORED
@requires_zlib
class DeflateUniversalNewlineTests(AbstractUniversalNewlineTests,
unittest.TestCase):
compression = zipfile.ZIP_DEFLATED
@requires_bz2
class Bzip2UniversalNewlineTests(AbstractUniversalNewlineTests,
unittest.TestCase):
compression = zipfile.ZIP_BZIP2
@requires_lzma
class LzmaUniversalNewlineTests(AbstractUniversalNewlineTests,
unittest.TestCase):
compression = zipfile.ZIP_LZMA
if __name__ == "__main__":
unittest.main()
| lgpl-3.0 | -5,728,849,412,639,061,000 | 38.507011 | 92 | 0.579875 | false |
cyli/volatility | volatility/plugins/linux/netstat.py | 10 | 2404 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: [email protected]
@organization:
"""
import socket
import volatility.obj as obj
import volatility.plugins.linux.common as linux_common
import volatility.plugins.linux.lsof as linux_lsof
import volatility.plugins.linux.pslist as linux_pslist
class linux_netstat(linux_pslist.linux_pslist):
"""Lists open sockets"""
def __init__(self, config, *args, **kwargs):
linux_pslist.linux_pslist.__init__(self, config, *args, **kwargs)
self._config.add_option('IGNORE_UNIX', short_option = 'U', default = None, help = 'ignore unix sockets', action = 'store_true')
    # it's a socket!
def render_text(self, outfd, data):
linux_common.set_plugin_members(self)
if not self.addr_space.profile.has_type("inet_sock"):
# ancient (2.6.9) centos kernels do not have inet_sock in debug info
raise AttributeError, "Given profile does not have inet_sock, please file a bug if the kernel version is > 2.6.11"
for task in data:
for ents in task.netstat():
if ents[0] == socket.AF_INET:
(_, proto, saddr, sport, daddr, dport, state) = ents[1]
outfd.write("{0:8s} {1:<16}:{2:>5} {3:<16}:{4:>5} {5:<15s} {6:>17s}/{7:<5d}\n".format(proto, saddr, sport, daddr, dport, state, task.comm, task.pid))
elif ents[0] == socket.AF_UNIX and not self._config.IGNORE_UNIX:
(name, inum) = ents[1]
outfd.write("UNIX {0:<8d} {1:>17s}/{2:<5d} {3:s}\n".format(inum, task.comm, task.pid, name))
| gpl-2.0 | 7,423,932,609,651,793,000 | 40.448276 | 169 | 0.64975 | false |
vdmann/cse-360-image-hosting-website | lib/python2.7/site-packages/django/contrib/gis/management/commands/inspectdb.py | 315 | 1466 | from django.core.management.commands.inspectdb import Command as InspectDBCommand
class Command(InspectDBCommand):
db_module = 'django.contrib.gis.db'
gis_tables = {}
def get_field_type(self, connection, table_name, row):
field_type, field_params, field_notes = super(Command, self).get_field_type(connection, table_name, row)
if field_type == 'GeometryField':
geo_col = row[0]
# Getting a more specific field type and any additional parameters
# from the `get_geometry_type` routine for the spatial backend.
field_type, geo_params = connection.introspection.get_geometry_type(table_name, geo_col)
field_params.update(geo_params)
# Adding the table name and column to the `gis_tables` dictionary, this
# allows us to track which tables need a GeoManager.
if table_name in self.gis_tables:
self.gis_tables[table_name].append(geo_col)
else:
self.gis_tables[table_name] = [geo_col]
return field_type, field_params, field_notes
def get_meta(self, table_name):
meta_lines = super(Command, self).get_meta(table_name)
if table_name in self.gis_tables:
# If the table is a geographic one, then we need make
# GeoManager the default manager for the model.
meta_lines.insert(0, ' objects = models.GeoManager()')
return meta_lines
| mit | 5,457,492,270,504,847,000 | 49.551724 | 112 | 0.639154 | false |
Bysmyyr/blink-crosswalk | Tools/Scripts/webkitpy/style/checkers/png.py | 170 | 3914 | # Copyright (C) 2012 Balazs Ankes ([email protected]) University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Supports checking WebKit style in png files."""
import os
import re
from webkitpy.common import checksvnconfigfile
from webkitpy.common import read_checksum_from_png
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.common.checkout.scm.detection import SCMDetector
class PNGChecker(object):
"""Check svn:mime-type for checking style"""
categories = set(['image/png'])
def __init__(self, file_path, handle_style_error, scm=None, host=None):
self._file_path = file_path
self._handle_style_error = handle_style_error
self._host = host or SystemHost()
self._fs = self._host.filesystem
self._detector = scm or SCMDetector(self._fs, self._host.executive).detect_scm_system(self._fs.getcwd())
def check(self, inline=None):
errorstr = ""
config_file_path = ""
detection = self._detector.display_name()
if self._fs.exists(self._file_path) and self._file_path.endswith("-expected.png"):
with self._fs.open_binary_file_for_reading(self._file_path) as filehandle:
if not read_checksum_from_png.read_checksum(filehandle):
self._handle_style_error(0, 'image/png', 5, "Image lacks a checksum. Generate pngs using run-webkit-tests to ensure they have a checksum.")
if detection == "git":
(file_missing, autoprop_missing, png_missing) = checksvnconfigfile.check(self._host, self._fs)
config_file_path = checksvnconfigfile.config_file_path(self._host, self._fs)
if file_missing:
self._handle_style_error(0, 'image/png', 5, "There is no SVN config file. (%s)" % config_file_path)
elif autoprop_missing and png_missing:
self._handle_style_error(0, 'image/png', 5, checksvnconfigfile.errorstr_autoprop(config_file_path) + checksvnconfigfile.errorstr_png(config_file_path))
elif autoprop_missing:
self._handle_style_error(0, 'image/png', 5, checksvnconfigfile.errorstr_autoprop(config_file_path))
elif png_missing:
self._handle_style_error(0, 'image/png', 5, checksvnconfigfile.errorstr_png(config_file_path))
elif detection == "svn":
prop_get = self._detector.propget("svn:mime-type", self._file_path)
if prop_get != "image/png":
errorstr = "Set the svn:mime-type property (svn propset svn:mime-type image/png %s)." % self._file_path
self._handle_style_error(0, 'image/png', 5, errorstr)
| bsd-3-clause | -3,351,296,925,016,274,400 | 51.186667 | 167 | 0.695708 | false |
matthewoliver/swift | test/unit/obj/test_expirer.py | 2 | 34828 | # Copyright (c) 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from time import time
from unittest import main, TestCase
from test.unit import FakeRing, mocked_http_conn, debug_logger
from tempfile import mkdtemp
from shutil import rmtree
from collections import defaultdict
from copy import deepcopy
import mock
import six
from six.moves import urllib
from swift.common import internal_client, utils, swob
from swift.common.utils import Timestamp
from swift.obj import expirer
def not_random():
return 0.5
last_not_sleep = 0
def not_sleep(seconds):
global last_not_sleep
last_not_sleep = seconds
class FakeInternalClient(object):
container_ring = FakeRing()
def __init__(self, aco_dict):
"""
        :param aco_dict: A dict of account, container, and object data that
            FakeInternalClient can return when each method is called. Each
            account maps to a dict of container names, and each container
            maps to a list of the object names it contains.
            e.g. {'account1': {
                     'container1': ['obj1', 'obj2', 'obj3'],
                     'container2': [],
                  },
                  'account2': {},
                 }
"""
self.aco_dict = defaultdict(dict)
self.aco_dict.update(aco_dict)
def get_account_info(self, account):
acc_dict = self.aco_dict[account]
container_count = len(acc_dict)
obj_count = sum(len(objs) for objs in acc_dict.values())
return container_count, obj_count
def iter_containers(self, account, prefix=''):
acc_dict = self.aco_dict[account]
return sorted([{'name': six.text_type(container)} for container in
acc_dict if container.startswith(prefix)])
def delete_container(*a, **kw):
pass
def iter_objects(self, account, container):
acc_dict = self.aco_dict[account]
obj_iter = acc_dict.get(container, [])
return [{'name': six.text_type(obj)} for obj in obj_iter]
def make_request(*a, **kw):
pass
class TestObjectExpirer(TestCase):
maxDiff = None
internal_client = None
def setUp(self):
global not_sleep
self.old_loadapp = internal_client.loadapp
self.old_sleep = internal_client.sleep
internal_client.loadapp = lambda *a, **kw: None
internal_client.sleep = not_sleep
self.rcache = mkdtemp()
self.conf = {'recon_cache_path': self.rcache}
self.logger = debug_logger('test-expirer')
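        # one expiry timestamp safely in the past (tasks due for execution)
        # and one in the future (tasks that must be skipped)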
self.past_time = str(int(time() - 86400))
self.future_time = str(int(time() + 86400))
# Dummy task queue for test
self.fake_swift = FakeInternalClient({
'.expiring_objects': {
# this task container will be checked
self.past_time: [
# tasks ready for execution
self.past_time + '-a0/c0/o0',
self.past_time + '-a1/c1/o1',
self.past_time + '-a2/c2/o2',
self.past_time + '-a3/c3/o3',
self.past_time + '-a4/c4/o4',
self.past_time + '-a5/c5/o5',
self.past_time + '-a6/c6/o6',
self.past_time + '-a7/c7/o7',
# task objects for unicode test
self.past_time + u'-a8/c8/o8\u2661',
self.past_time + u'-a9/c9/o9\xf8',
# this task will be skipped
self.future_time + '-a10/c10/o10'],
# this task container will be skipped
self.future_time: [
self.future_time + '-a11/c11/o11']}
})
self.expirer = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=self.fake_swift)
        # target object paths which should be expired now
self.expired_target_path_list = [
'a0/c0/o0', 'a1/c1/o1', 'a2/c2/o2', 'a3/c3/o3', 'a4/c4/o4',
'a5/c5/o5', 'a6/c6/o6', 'a7/c7/o7',
'a8/c8/o8\xe2\x99\xa1', 'a9/c9/o9\xc3\xb8',
]
def tearDown(self):
rmtree(self.rcache)
internal_client.sleep = self.old_sleep
internal_client.loadapp = self.old_loadapp
def test_get_process_values_from_kwargs(self):
x = expirer.ObjectExpirer({})
vals = {
'processes': 5,
'process': 1,
}
x.get_process_values(vals)
self.assertEqual(x.processes, 5)
self.assertEqual(x.process, 1)
def test_get_process_values_from_config(self):
vals = {
'processes': 5,
'process': 1,
}
x = expirer.ObjectExpirer(vals)
x.get_process_values({})
self.assertEqual(x.processes, 5)
self.assertEqual(x.process, 1)
def test_get_process_values_negative_process(self):
vals = {
'processes': 5,
'process': -1,
}
# from config
x = expirer.ObjectExpirer(vals)
expected_msg = 'process must be an integer greater' \
' than or equal to 0'
with self.assertRaises(ValueError) as ctx:
x.get_process_values({})
self.assertEqual(str(ctx.exception), expected_msg)
# from kwargs
x = expirer.ObjectExpirer({})
with self.assertRaises(ValueError) as ctx:
x.get_process_values(vals)
self.assertEqual(str(ctx.exception), expected_msg)
def test_get_process_values_negative_processes(self):
vals = {
'processes': -5,
'process': 1,
}
# from config
x = expirer.ObjectExpirer(vals)
expected_msg = 'processes must be an integer greater' \
' than or equal to 0'
with self.assertRaises(ValueError) as ctx:
x.get_process_values({})
self.assertEqual(str(ctx.exception), expected_msg)
# from kwargs
x = expirer.ObjectExpirer({})
with self.assertRaises(ValueError) as ctx:
x.get_process_values(vals)
self.assertEqual(str(ctx.exception), expected_msg)
def test_get_process_values_process_greater_than_processes(self):
vals = {
'processes': 5,
'process': 7,
}
# from config
x = expirer.ObjectExpirer(vals)
expected_msg = 'process must be less than processes'
with self.assertRaises(ValueError) as ctx:
x.get_process_values({})
self.assertEqual(str(ctx.exception), expected_msg)
# from kwargs
x = expirer.ObjectExpirer({})
with self.assertRaises(ValueError) as ctx:
x.get_process_values(vals)
self.assertEqual(str(ctx.exception), expected_msg)
def test_get_process_values_process_equal_to_processes(self):
vals = {
'processes': 5,
'process': 5,
}
# from config
x = expirer.ObjectExpirer(vals)
expected_msg = 'process must be less than processes'
with self.assertRaises(ValueError) as ctx:
x.get_process_values({})
self.assertEqual(str(ctx.exception), expected_msg)
# from kwargs
x = expirer.ObjectExpirer({})
with self.assertRaises(ValueError) as ctx:
x.get_process_values(vals)
self.assertEqual(str(ctx.exception), expected_msg)
def test_init_concurrency_too_small(self):
conf = {
'concurrency': 0,
}
self.assertRaises(ValueError, expirer.ObjectExpirer, conf)
conf = {
'concurrency': -1,
}
self.assertRaises(ValueError, expirer.ObjectExpirer, conf)
def test_process_based_concurrency(self):
class ObjectExpirer(expirer.ObjectExpirer):
def __init__(self, conf, swift):
super(ObjectExpirer, self).__init__(conf, swift=swift)
self.processes = 3
self.deleted_objects = {}
def delete_object(self, target_path, delete_timestamp,
task_account, task_container, task_object):
if task_container not in self.deleted_objects:
self.deleted_objects[task_container] = set()
self.deleted_objects[task_container].add(task_object)
x = ObjectExpirer(self.conf, swift=self.fake_swift)
deleted_objects = defaultdict(set)
for i in range(3):
x.process = i
# reset progress so we know we don't double-up work among processes
x.deleted_objects = defaultdict(set)
x.run_once()
for task_container, deleted in x.deleted_objects.items():
self.assertFalse(deleted_objects[task_container] & deleted)
deleted_objects[task_container] |= deleted
# sort for comparison
deleted_objects = {
con: sorted(o_set) for con, o_set in deleted_objects.items()}
expected = {
self.past_time: [
self.past_time + '-' + target_path
for target_path in self.expired_target_path_list]}
self.assertEqual(deleted_objects, expected)
def test_delete_object(self):
x = expirer.ObjectExpirer({}, logger=self.logger)
actual_obj = 'actual_obj'
timestamp = int(time())
reclaim_ts = timestamp - x.reclaim_age
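        # once a task is older than reclaim_age it may be popped from the
        # queue even if the delete gets a 404 or 412 response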
account = 'account'
container = 'container'
obj = 'obj'
http_exc = {
resp_code:
internal_client.UnexpectedResponse(
str(resp_code), swob.HTTPException(status=resp_code))
for resp_code in {404, 412, 500}
}
exc_other = Exception()
def check_call_to_delete_object(exc, ts, should_pop):
x.logger.clear()
start_reports = x.report_objects
with mock.patch.object(x, 'delete_actual_object',
side_effect=exc) as delete_actual:
with mock.patch.object(x, 'pop_queue') as pop_queue:
x.delete_object(actual_obj, ts, account, container, obj)
delete_actual.assert_called_once_with(actual_obj, ts)
log_lines = x.logger.get_lines_for_level('error')
if should_pop:
pop_queue.assert_called_once_with(account, container, obj)
self.assertEqual(start_reports + 1, x.report_objects)
self.assertFalse(log_lines)
else:
self.assertFalse(pop_queue.called)
self.assertEqual(start_reports, x.report_objects)
self.assertEqual(1, len(log_lines))
if isinstance(exc, internal_client.UnexpectedResponse):
self.assertEqual(
log_lines[0],
'Unexpected response while deleting object '
'account container obj: %s' % exc.resp.status_int)
else:
self.assertTrue(log_lines[0].startswith(
'Exception while deleting object '
'account container obj'))
# verify pop_queue logic on exceptions
for exc, ts, should_pop in [(None, timestamp, True),
(http_exc[404], timestamp, False),
(http_exc[412], timestamp, False),
(http_exc[500], reclaim_ts, False),
(exc_other, reclaim_ts, False),
(http_exc[404], reclaim_ts, True),
(http_exc[412], reclaim_ts, True)]:
try:
check_call_to_delete_object(exc, ts, should_pop)
except AssertionError as err:
self.fail("Failed on %r at %f: %s" % (exc, ts, err))
def test_report(self):
x = expirer.ObjectExpirer({}, logger=self.logger)
x.report()
self.assertEqual(x.logger.get_lines_for_level('info'), [])
x.logger._clear()
x.report(final=True)
self.assertTrue(
'completed' in str(x.logger.get_lines_for_level('info')))
self.assertTrue(
'so far' not in str(x.logger.get_lines_for_level('info')))
x.logger._clear()
x.report_last_time = time() - x.report_interval
x.report()
self.assertTrue(
'completed' not in str(x.logger.get_lines_for_level('info')))
self.assertTrue(
'so far' in str(x.logger.get_lines_for_level('info')))
def test_parse_task_obj(self):
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
def assert_parse_task_obj(task_obj, expected_delete_at,
expected_account, expected_container,
expected_obj):
delete_at, account, container, obj = x.parse_task_obj(task_obj)
self.assertEqual(delete_at, expected_delete_at)
self.assertEqual(account, expected_account)
self.assertEqual(container, expected_container)
self.assertEqual(obj, expected_obj)
assert_parse_task_obj('0000-a/c/o', 0, 'a', 'c', 'o')
assert_parse_task_obj('0001-a/c/o', 1, 'a', 'c', 'o')
assert_parse_task_obj('1000-a/c/o', 1000, 'a', 'c', 'o')
assert_parse_task_obj('0000-acc/con/obj', 0, 'acc', 'con', 'obj')
def make_task(self, delete_at, target):
return {
'task_account': '.expiring_objects',
'task_container': delete_at,
'task_object': delete_at + '-' + target,
'delete_timestamp': Timestamp(delete_at),
'target_path': target,
}
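    # Task objects are named '<delete-at>-<account>/<container>/<obj>' and live
    # in per-timestamp containers under the '.expiring_objects' account; the
    # dict returned above mirrors what the expirer's task iteration yields for
    # them, as the tests below exercise.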
def test_round_robin_order(self):
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
task_con_obj_list = [
# objects in 0000 timestamp container
self.make_task('0000', 'a/c0/o0'),
self.make_task('0000', 'a/c0/o1'),
# objects in 0001 timestamp container
self.make_task('0001', 'a/c1/o0'),
self.make_task('0001', 'a/c1/o1'),
# objects in 0002 timestamp container
self.make_task('0002', 'a/c2/o0'),
self.make_task('0002', 'a/c2/o1'),
]
result = list(x.round_robin_order(task_con_obj_list))
        # ordered by popping one object to delete from each target container in turn
expected = [
self.make_task('0000', 'a/c0/o0'),
self.make_task('0001', 'a/c1/o0'),
self.make_task('0002', 'a/c2/o0'),
self.make_task('0000', 'a/c0/o1'),
self.make_task('0001', 'a/c1/o1'),
self.make_task('0002', 'a/c2/o1'),
]
self.assertEqual(expected, result)
# task containers have some task objects with invalid target paths
task_con_obj_list = [
# objects in 0000 timestamp container
self.make_task('0000', 'invalid0'),
self.make_task('0000', 'a/c0/o0'),
self.make_task('0000', 'a/c0/o1'),
# objects in 0001 timestamp container
self.make_task('0001', 'a/c1/o0'),
self.make_task('0001', 'invalid1'),
self.make_task('0001', 'a/c1/o1'),
# objects in 0002 timestamp container
self.make_task('0002', 'a/c2/o0'),
self.make_task('0002', 'a/c2/o1'),
self.make_task('0002', 'invalid2'),
]
result = list(x.round_robin_order(task_con_obj_list))
# the invalid task objects are ignored
expected = [
self.make_task('0000', 'a/c0/o0'),
self.make_task('0001', 'a/c1/o0'),
self.make_task('0002', 'a/c2/o0'),
self.make_task('0000', 'a/c0/o1'),
self.make_task('0001', 'a/c1/o1'),
self.make_task('0002', 'a/c2/o1'),
]
self.assertEqual(expected, result)
# for a given target container, tasks won't necessarily all go in
# the same timestamp container
task_con_obj_list = [
# objects in 0000 timestamp container
self.make_task('0000', 'a/c0/o0'),
self.make_task('0000', 'a/c0/o1'),
self.make_task('0000', 'a/c2/o2'),
self.make_task('0000', 'a/c2/o3'),
# objects in 0001 timestamp container
self.make_task('0001', 'a/c0/o2'),
self.make_task('0001', 'a/c0/o3'),
self.make_task('0001', 'a/c1/o0'),
self.make_task('0001', 'a/c1/o1'),
# objects in 0002 timestamp container
self.make_task('0002', 'a/c2/o0'),
self.make_task('0002', 'a/c2/o1'),
]
result = list(x.round_robin_order(task_con_obj_list))
# so we go around popping by *target* container, not *task* container
expected = [
self.make_task('0000', 'a/c0/o0'),
self.make_task('0001', 'a/c1/o0'),
self.make_task('0000', 'a/c2/o2'),
self.make_task('0000', 'a/c0/o1'),
self.make_task('0001', 'a/c1/o1'),
self.make_task('0000', 'a/c2/o3'),
self.make_task('0001', 'a/c0/o2'),
self.make_task('0002', 'a/c2/o0'),
self.make_task('0001', 'a/c0/o3'),
self.make_task('0002', 'a/c2/o1'),
]
self.assertEqual(expected, result)
# all of the work to be done could be for different target containers
task_con_obj_list = [
# objects in 0000 timestamp container
self.make_task('0000', 'a/c0/o'),
self.make_task('0000', 'a/c1/o'),
self.make_task('0000', 'a/c2/o'),
self.make_task('0000', 'a/c3/o'),
# objects in 0001 timestamp container
self.make_task('0001', 'a/c4/o'),
self.make_task('0001', 'a/c5/o'),
self.make_task('0001', 'a/c6/o'),
self.make_task('0001', 'a/c7/o'),
# objects in 0002 timestamp container
self.make_task('0002', 'a/c8/o'),
self.make_task('0002', 'a/c9/o'),
]
result = list(x.round_robin_order(task_con_obj_list))
# in which case, we kind of hammer the task containers
self.assertEqual(task_con_obj_list, result)
def test_hash_mod(self):
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
mod_count = [0, 0, 0]
for i in range(1000):
name = 'obj%d' % i
mod = x.hash_mod(name, 3)
mod_count[mod] += 1
        # the 1000 names should be spread roughly evenly across the 3 buckets
self.assertGreater(mod_count[0], 300)
self.assertGreater(mod_count[1], 300)
self.assertGreater(mod_count[2], 300)
def test_iter_task_accounts_to_expire(self):
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
results = [_ for _ in x.iter_task_accounts_to_expire()]
self.assertEqual(results, [('.expiring_objects', 0, 1)])
self.conf['processes'] = '2'
self.conf['process'] = '1'
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
results = [_ for _ in x.iter_task_accounts_to_expire()]
self.assertEqual(results, [('.expiring_objects', 1, 2)])
def test_delete_at_time_of_task_container(self):
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
self.assertEqual(x.delete_at_time_of_task_container('0000'), 0)
self.assertEqual(x.delete_at_time_of_task_container('0001'), 1)
self.assertEqual(x.delete_at_time_of_task_container('1000'), 1000)
def test_run_once_nothing_to_do(self):
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
x.swift = 'throw error because a string does not have needed methods'
x.run_once()
self.assertEqual(x.logger.get_lines_for_level('error'),
["Unhandled exception: "])
log_args, log_kwargs = x.logger.log_dict['error'][0]
self.assertEqual(str(log_kwargs['exc_info'][1]),
"'str' object has no attribute 'get_account_info'")
def test_run_once_calls_report(self):
with mock.patch.object(self.expirer, 'pop_queue',
lambda a, c, o: None):
self.expirer.run_once()
self.assertEqual(
self.expirer.logger.get_lines_for_level('info'), [
'Pass beginning for task account .expiring_objects; '
'2 possible containers; 12 possible objects',
'Pass completed in 0s; 10 objects expired',
])
def test_skip_task_account_without_task_container(self):
fake_swift = FakeInternalClient({
# task account has no containers
'.expiring_objects': dict()
})
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=fake_swift)
x.run_once()
self.assertEqual(
x.logger.get_lines_for_level('info'), [
'Pass completed in 0s; 0 objects expired',
])
def test_iter_task_to_expire(self):
# In this test, all tasks are assigned to the tested expirer
my_index = 0
divisor = 1
task_account_container_list = [('.expiring_objects', self.past_time)]
expected = [
self.make_task(self.past_time, target_path)
for target_path in self.expired_target_path_list]
self.assertEqual(
list(self.expirer.iter_task_to_expire(
task_account_container_list, my_index, divisor)),
expected)
# the task queue has invalid task object
invalid_aco_dict = deepcopy(self.fake_swift.aco_dict)
invalid_aco_dict['.expiring_objects'][self.past_time].insert(
0, self.past_time + '-invalid0')
invalid_aco_dict['.expiring_objects'][self.past_time].insert(
5, self.past_time + '-invalid1')
invalid_fake_swift = FakeInternalClient(invalid_aco_dict)
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=invalid_fake_swift)
# but the invalid tasks are skipped
self.assertEqual(
list(x.iter_task_to_expire(
task_account_container_list, my_index, divisor)),
expected)
def test_run_once_unicode_problem(self):
requests = []
def capture_requests(ipaddr, port, method, path, *args, **kwargs):
requests.append((method, path))
        # pop_queue issues 3 DELETE requests for each of the 10 executed task objects
code_list = [200] * 3 * 10
with mocked_http_conn(*code_list, give_connect=capture_requests):
self.expirer.run_once()
self.assertEqual(len(requests), 30)
def test_container_timestamp_break(self):
with mock.patch.object(self.fake_swift, 'iter_objects') as mock_method:
self.expirer.run_once()
# iter_objects is called only for past_time, not future_time
self.assertEqual(mock_method.call_args_list,
[mock.call('.expiring_objects', self.past_time)])
def test_object_timestamp_break(self):
with mock.patch.object(self.expirer, 'delete_actual_object') \
as mock_method, \
mock.patch.object(self.expirer, 'pop_queue'):
self.expirer.run_once()
        # only tasks whose delete-at time is in the past are executed
self.assertEqual(
mock_method.call_args_list,
[mock.call(target_path, self.past_time)
for target_path in self.expired_target_path_list])
def test_failed_delete_keeps_entry(self):
def deliberately_blow_up(actual_obj, timestamp):
raise Exception('failed to delete actual object')
        # first, no task completes successfully
with mock.patch.object(self.expirer, 'delete_actual_object',
deliberately_blow_up), \
mock.patch.object(self.expirer, 'pop_queue') as mock_method:
self.expirer.run_once()
# no tasks are popped from the queue
self.assertEqual(mock_method.call_args_list, [])
# all tasks are done
with mock.patch.object(self.expirer, 'delete_actual_object',
lambda o, t: None), \
mock.patch.object(self.expirer, 'pop_queue') as mock_method:
self.expirer.run_once()
# all tasks are popped from the queue
self.assertEqual(
mock_method.call_args_list,
[mock.call('.expiring_objects', self.past_time,
self.past_time + '-' + target_path)
for target_path in self.expired_target_path_list])
def test_success_gets_counted(self):
self.assertEqual(self.expirer.report_objects, 0)
with mock.patch('swift.obj.expirer.MAX_OBJECTS_TO_CACHE', 0), \
mock.patch.object(self.expirer, 'delete_actual_object',
lambda o, t: None), \
mock.patch.object(self.expirer, 'pop_queue',
lambda a, c, o: None):
self.expirer.run_once()
self.assertEqual(self.expirer.report_objects, 10)
def test_delete_actual_object_does_not_get_unicode(self):
got_unicode = [False]
def delete_actual_object_test_for_unicode(actual_obj, timestamp):
if isinstance(actual_obj, six.text_type):
got_unicode[0] = True
self.assertEqual(self.expirer.report_objects, 0)
with mock.patch.object(self.expirer, 'delete_actual_object',
delete_actual_object_test_for_unicode), \
mock.patch.object(self.expirer, 'pop_queue',
lambda a, c, o: None):
self.expirer.run_once()
self.assertEqual(self.expirer.report_objects, 10)
self.assertFalse(got_unicode[0])
def test_failed_delete_continues_on(self):
def fail_delete_container(*a, **kw):
raise Exception('failed to delete container')
def fail_delete_actual_object(actual_obj, timestamp):
raise Exception('failed to delete actual object')
with mock.patch.object(self.fake_swift, 'delete_container',
fail_delete_container), \
mock.patch.object(self.expirer, 'delete_actual_object',
fail_delete_actual_object):
self.expirer.run_once()
error_lines = self.expirer.logger.get_lines_for_level('error')
self.assertEqual(error_lines, [
'Exception while deleting object %s %s %s '
'failed to delete actual object: ' % (
'.expiring_objects', self.past_time,
self.past_time + '-' + target_path)
for target_path in self.expired_target_path_list] + [
'Exception while deleting container %s %s '
'failed to delete container: ' % (
'.expiring_objects', self.past_time)])
self.assertEqual(self.expirer.logger.get_lines_for_level('info'), [
'Pass beginning for task account .expiring_objects; '
'2 possible containers; 12 possible objects',
'Pass completed in 0s; 0 objects expired',
])
def test_run_forever_initial_sleep_random(self):
global last_not_sleep
def raise_system_exit():
raise SystemExit('test_run_forever')
interval = 1234
x = expirer.ObjectExpirer({'__file__': 'unit_test',
'interval': interval})
orig_random = expirer.random
orig_sleep = expirer.sleep
try:
expirer.random = not_random
expirer.sleep = not_sleep
x.run_once = raise_system_exit
x.run_forever()
except SystemExit as err:
pass
finally:
expirer.random = orig_random
expirer.sleep = orig_sleep
self.assertEqual(str(err), 'test_run_forever')
self.assertEqual(last_not_sleep, 0.5 * interval)
def test_run_forever_catches_usual_exceptions(self):
raises = [0]
def raise_exceptions():
raises[0] += 1
if raises[0] < 2:
raise Exception('exception %d' % raises[0])
raise SystemExit('exiting exception %d' % raises[0])
x = expirer.ObjectExpirer({}, logger=self.logger)
orig_sleep = expirer.sleep
try:
expirer.sleep = not_sleep
x.run_once = raise_exceptions
x.run_forever()
except SystemExit as err:
pass
finally:
expirer.sleep = orig_sleep
self.assertEqual(str(err), 'exiting exception 2')
self.assertEqual(x.logger.get_lines_for_level('error'),
['Unhandled exception: '])
log_args, log_kwargs = x.logger.log_dict['error'][0]
self.assertEqual(str(log_kwargs['exc_info'][1]),
'exception 1')
def test_delete_actual_object(self):
got_env = [None]
def fake_app(env, start_response):
got_env[0] = env
start_response('204 No Content', [('Content-Length', '0')])
return []
internal_client.loadapp = lambda *a, **kw: fake_app
x = expirer.ObjectExpirer({})
ts = Timestamp('1234')
x.delete_actual_object('/path/to/object', ts)
self.assertEqual(got_env[0]['HTTP_X_IF_DELETE_AT'], ts)
self.assertEqual(got_env[0]['HTTP_X_TIMESTAMP'],
got_env[0]['HTTP_X_IF_DELETE_AT'])
def test_delete_actual_object_nourlquoting(self):
# delete_actual_object should not do its own url quoting because
# internal client's make_request handles that.
got_env = [None]
def fake_app(env, start_response):
got_env[0] = env
start_response('204 No Content', [('Content-Length', '0')])
return []
internal_client.loadapp = lambda *a, **kw: fake_app
x = expirer.ObjectExpirer({})
ts = Timestamp('1234')
x.delete_actual_object('/path/to/object name', ts)
self.assertEqual(got_env[0]['HTTP_X_IF_DELETE_AT'], ts)
self.assertEqual(got_env[0]['HTTP_X_TIMESTAMP'],
got_env[0]['HTTP_X_IF_DELETE_AT'])
self.assertEqual(got_env[0]['PATH_INFO'], '/v1/path/to/object name')
def test_delete_actual_object_returns_expected_error(self):
def do_test(test_status, should_raise):
calls = [0]
def fake_app(env, start_response):
calls[0] += 1
start_response(test_status, [('Content-Length', '0')])
return []
internal_client.loadapp = lambda *a, **kw: fake_app
x = expirer.ObjectExpirer({})
ts = Timestamp('1234')
if should_raise:
with self.assertRaises(internal_client.UnexpectedResponse):
x.delete_actual_object('/path/to/object', ts)
else:
x.delete_actual_object('/path/to/object', ts)
self.assertEqual(calls[0], 1)
# object was deleted and tombstone reaped
do_test('404 Not Found', True)
# object was overwritten *after* the original expiration, or
do_test('409 Conflict', False)
# object was deleted but tombstone still exists, or
# object was overwritten ahead of the original expiration, or
# object was POSTed to with a new (or no) expiration, or ...
do_test('412 Precondition Failed', True)
def test_delete_actual_object_does_not_handle_odd_stuff(self):
def fake_app(env, start_response):
start_response(
'503 Internal Server Error',
[('Content-Length', '0')])
return []
internal_client.loadapp = lambda *a, **kw: fake_app
x = expirer.ObjectExpirer({})
exc = None
try:
x.delete_actual_object('/path/to/object', Timestamp('1234'))
except Exception as err:
exc = err
finally:
pass
self.assertEqual(503, exc.resp.status_int)
def test_delete_actual_object_quotes(self):
name = 'this name should get quoted'
timestamp = Timestamp('1366063156.863045')
x = expirer.ObjectExpirer({})
x.swift.make_request = mock.Mock()
x.swift.make_request.return_value.status_int = 204
x.delete_actual_object(name, timestamp)
self.assertEqual(x.swift.make_request.call_count, 1)
self.assertEqual(x.swift.make_request.call_args[0][1],
'/v1/' + urllib.parse.quote(name))
def test_delete_actual_object_queue_cleaning(self):
name = 'something'
timestamp = Timestamp('1515544858.80602')
x = expirer.ObjectExpirer({})
x.swift.make_request = mock.MagicMock()
x.delete_actual_object(name, timestamp)
self.assertEqual(x.swift.make_request.call_count, 1)
header = 'X-Backend-Clean-Expiring-Object-Queue'
self.assertEqual(
x.swift.make_request.call_args[0][2].get(header),
'no')
def test_pop_queue(self):
x = expirer.ObjectExpirer({}, logger=self.logger,
swift=FakeInternalClient({}))
requests = []
def capture_requests(ipaddr, port, method, path, *args, **kwargs):
requests.append((method, path))
with mocked_http_conn(
200, 200, 200, give_connect=capture_requests) as fake_conn:
x.pop_queue('a', 'c', 'o')
self.assertRaises(StopIteration, fake_conn.code_iter.next)
for method, path in requests:
self.assertEqual(method, 'DELETE')
device, part, account, container, obj = utils.split_path(
path, 5, 5, True)
self.assertEqual(account, 'a')
self.assertEqual(container, 'c')
self.assertEqual(obj, 'o')
if __name__ == '__main__':
main()
| apache-2.0 | -9,147,987,645,053,107,000 | 38.264938 | 79 | 0.557999 | false |
oopy/micropython | tests/basics/exceptpoly2.py | 63 | 1789 | try:
raise MemoryError
except Exception:
print("Caught MemoryError via Exception")
try:
raise MemoryError
except MemoryError:
print("Caught MemoryError")
try:
raise NameError
except Exception:
print("Caught NameError via Exception")
try:
raise NameError
except NameError:
print("Caught NameError")
try:
raise NotImplementedError
except RuntimeError:
print("Caught NotImplementedError via RuntimeError")
try:
raise NotImplementedError
except NotImplementedError:
print("Caught NotImplementedError")
try:
raise OSError
except Exception:
print("Caught OSError via Exception")
try:
raise OSError
except OSError:
print("Caught OSError")
try:
raise OverflowError
except ArithmeticError:
print("Caught OverflowError via ArithmeticError")
try:
raise OverflowError
except OverflowError:
print("Caught OverflowError")
try:
raise RuntimeError
except Exception:
print("Caught RuntimeError via Exception")
try:
raise RuntimeError
except RuntimeError:
print("Caught RuntimeError")
try:
raise SyntaxError
except Exception:
print("Caught SyntaxError via Exception")
try:
raise SyntaxError
except SyntaxError:
print("Caught SyntaxError")
try:
raise TypeError
except Exception:
print("Caught TypeError via Exception")
try:
raise TypeError
except TypeError:
print("Caught TypeError")
try:
raise ValueError
except Exception:
print("Caught ValueError via Exception")
try:
raise ValueError
except ValueError:
print("Caught ValueError")
try:
raise ZeroDivisionError
except ArithmeticError:
print("Caught ZeroDivisionError via ArithmeticError")
try:
raise ZeroDivisionError
except ZeroDivisionError:
print("Caught ZeroDivisionError")
| mit | 6,119,470,245,631,497,000 | 17.070707 | 57 | 0.743432 | false |
haeusser/tensorflow | tensorflow/contrib/ndlstm/python/lstm2d_test.py | 22 | 3473 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for 2D LSTMs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
# TODO: #6568 Remove this hack that makes dlopen() not crash.
if hasattr(sys, "getdlopenflags") and hasattr(sys, "setdlopenflags"):
import ctypes
sys.setdlopenflags(sys.getdlopenflags() | ctypes.RTLD_GLOBAL)
import numpy as np
from tensorflow.contrib.ndlstm.python import lstm2d as lstm2d_lib
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
lstm2d = lstm2d_lib
def _rand(*size):
return np.random.uniform(size=size).astype("f")
class Lstm2DTest(test_util.TensorFlowTestCase):
def testImagesToSequenceDims(self):
with self.test_session():
inputs = constant_op.constant(_rand(2, 7, 11, 5))
outputs = lstm2d.images_to_sequence(inputs)
variables.global_variables_initializer().run()
result = outputs.eval()
self.assertEqual(tuple(result.shape), (11, 14, 5))
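  # Shape relationship illustrated by the assertion above: an image batch of
  # (batch, height, width, depth) == (2, 7, 11, 5) becomes a sequence of
  # (width, batch * height, depth) == (11, 14, 5) -- each image column is one
  # time step, with the rows of every image in the batch stacked together.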
def testSequenceToImagesDims(self):
with self.test_session():
inputs = constant_op.constant(_rand(11, 14, 5))
outputs = lstm2d.sequence_to_images(inputs, 2)
variables.global_variables_initializer().run()
result = outputs.eval()
self.assertEqual(tuple(result.shape), (2, 7, 11, 5))
def testImagesAndSequenceDims(self):
with self.test_session():
size = (2, 7, 11, 5)
inputs = constant_op.constant(_rand(*size))
sequence = lstm2d.images_to_sequence(inputs)
outputs = lstm2d.sequence_to_images(sequence, size[0])
variables.global_variables_initializer().run()
result = outputs.eval()
self.assertEqual(tuple(result.shape), size)
def testSeparableLstmDims(self):
with self.test_session():
inputs = constant_op.constant(_rand(2, 7, 11, 5))
outputs = lstm2d.separable_lstm(inputs, 8)
variables.global_variables_initializer().run()
result = outputs.eval()
self.assertEqual(tuple(result.shape), (2, 7, 11, 8))
def testReduceToSequenceDims(self):
with self.test_session():
inputs = constant_op.constant(_rand(2, 7, 11, 5))
outputs = lstm2d.reduce_to_sequence(inputs, 8)
variables.global_variables_initializer().run()
result = outputs.eval()
self.assertEqual(tuple(result.shape), (2, 11, 8))
def testReduceToFinalDims(self):
with self.test_session():
inputs = constant_op.constant(_rand(2, 7, 11, 5))
outputs = lstm2d.reduce_to_final(inputs, 8, 12)
variables.global_variables_initializer().run()
result = outputs.eval()
self.assertEqual(tuple(result.shape), (2, 8))
if __name__ == "__main__":
test.main()
| apache-2.0 | 2,188,381,737,227,675,600 | 34.804124 | 80 | 0.686438 | false |
runt18/nupic | tests/unit/nupic/engine/unified_py_parameter_test.py | 35 | 3604 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Test for get/setParameter in python -- these methods are syntactic sugar
that allow you to access parameters without knowing their types,
at a moderate performance penalty.
"""
import unittest2 as unittest
# import for type comparison with Array.
# (Seems we should be able to use nupic.engine.Array directly.)
import nupic.bindings.engine_internal
from nupic.engine import Network
class NetworkUnifiedPyParameterTest(unittest.TestCase):
def testScalars(self):
scalars = [
("int32Param", 32, int, 35),
("uint32Param", 33, int, 36),
("int64Param", 64, long, 74),
("uint64Param", 65, long, 75),
("real32Param", 32.1, float, 33.1),
("real64Param", 64.1, float, 65.1),
("stringParam", "nodespec value", str, "new value")]
n = Network()
    l1 = n.addRegion("l1", "TestNode", "")
x = l1.getParameter("uint32Param")
for paramName, initval, paramtype, newval in scalars:
# Check the initial value for each parameter.
x = l1.getParameter(paramName)
self.assertEqual(type(x), paramtype)
if initval is None:
continue
if type(x) == float:
self.assertTrue(abs(x - initval) < 0.00001)
else:
self.assertEqual(x, initval)
# Now set the value, and check to make sure the value is updated
l1.setParameter(paramName, newval)
x = l1.getParameter(paramName)
self.assertEqual(type(x), paramtype)
if type(x) == float:
self.assertTrue(abs(x - newval) < 0.00001)
else:
self.assertEqual(x, newval)
def testArrays(self):
arrays = [
("real32ArrayParam",
[0*32, 1*32, 2*32, 3*32, 4*32, 5*32, 6*32, 7*32],
"Real32"),
("int64ArrayParam",
[0*64, 1*64, 2*64, 3*64],
"Int64")
]
n = Network()
    l1 = n.addRegion("l1", "TestNode", "")
for paramName, initval, paramtype in arrays:
x = l1.getParameter(paramName)
self.assertTrue(isinstance(x, nupic.bindings.engine_internal.Array))
self.assertEqual(x.getType(), paramtype)
self.assertEqual(len(x), len(initval))
for i in xrange(len(x)):
self.assertEqual(x[i], initval[i])
for i in xrange(len(x)):
x[i] = x[i] * 2
l1.setParameter(paramName, x)
x = l1.getParameter(paramName)
self.assertTrue(isinstance(x, nupic.bindings.engine_internal.Array))
self.assertEqual(x.getType(), paramtype)
self.assertEqual(len(x), len(initval))
for i in xrange(len(x)):
self.assertEqual(x[i], 2 * initval[i])
if __name__ == "__main__":
unittest.main()
| agpl-3.0 | -4,403,359,557,778,188,300 | 31.468468 | 74 | 0.628746 | false |
zengenti/ansible | lib/ansible/plugins/callback/junit.py | 92 | 8951 | # (c) 2016 Matt Clay <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import time
import re
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins.callback import CallbackBase
try:
from junit_xml import TestSuite, TestCase
HAS_JUNIT_XML = True
except ImportError:
HAS_JUNIT_XML = False
try:
from collections import OrderedDict
HAS_ORDERED_DICT = True
except ImportError:
try:
from ordereddict import OrderedDict
HAS_ORDERED_DICT = True
except ImportError:
HAS_ORDERED_DICT = False
class CallbackModule(CallbackBase):
"""
This callback writes playbook output to a JUnit formatted XML file.
Tasks show up in the report as follows:
'ok': pass
'failed' with 'EXPECTED FAILURE' in the task name: pass
'failed' due to an exception: error
'failed' for other reasons: failure
'skipped': skipped
This plugin makes use of the following environment variables:
JUNIT_OUTPUT_DIR (optional): Directory to write XML files to.
Default: ~/.ansible.log
JUNIT_TASK_CLASS (optional): Configure the output to be one class per yaml file
Default: False
Requires:
junit_xml
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'aggregate'
CALLBACK_NAME = 'junit'
CALLBACK_NEEDS_WHITELIST = True
def __init__(self):
super(CallbackModule, self).__init__()
self._output_dir = os.getenv('JUNIT_OUTPUT_DIR', os.path.expanduser('~/.ansible.log'))
self._task_class = os.getenv('JUNIT_TASK_CLASS', 'False').lower()
self._playbook_path = None
self._playbook_name = None
self._play_name = None
self._task_data = None
self.disabled = False
if not HAS_JUNIT_XML:
self.disabled = True
self._display.warning('The `junit_xml` python module is not installed. '
'Disabling the `junit` callback plugin.')
if HAS_ORDERED_DICT:
self._task_data = OrderedDict()
else:
self.disabled = True
self._display.warning('The `ordereddict` python module is not installed. '
'Disabling the `junit` callback plugin.')
if not os.path.exists(self._output_dir):
os.mkdir(self._output_dir)
def _start_task(self, task):
""" record the start of a task for one or more hosts """
uuid = task._uuid
if uuid in self._task_data:
return
play = self._play_name
name = task.get_name().strip()
path = task.get_path()
if not task.no_log:
args = ', '.join(('%s=%s' % a for a in task.args.items()))
if args:
name += ' ' + args
self._task_data[uuid] = TaskData(uuid, name, path, play)
def _finish_task(self, status, result):
""" record the results of a task for a single host """
task_uuid = result._task._uuid
if hasattr(result, '_host'):
host_uuid = result._host._uuid
host_name = result._host.name
else:
host_uuid = 'include'
host_name = 'include'
task_data = self._task_data[task_uuid]
if status == 'failed' and 'EXPECTED FAILURE' in task_data.name:
status = 'ok'
task_data.add_host(HostData(host_uuid, host_name, status, result))
def _build_test_case(self, task_data, host_data):
""" build a TestCase from the given TaskData and HostData """
name = '[%s] %s: %s' % (host_data.name, task_data.play, task_data.name)
duration = host_data.finish - task_data.start
if self._task_class == 'true':
junit_classname = re.sub('\.yml:[0-9]+$', '', task_data.path)
else:
junit_classname = task_data.path
if host_data.status == 'included':
return TestCase(name, junit_classname, duration, host_data.result)
res = host_data.result._result
rc = res.get('rc', 0)
dump = self._dump_results(res, indent=0)
dump = self._cleanse_string(dump)
if host_data.status == 'ok':
return TestCase(name, junit_classname, duration, dump)
test_case = TestCase(name, junit_classname, duration)
if host_data.status == 'failed':
if 'exception' in res:
message = res['exception'].strip().split('\n')[-1]
output = res['exception']
test_case.add_error_info(message, output)
elif 'msg' in res:
message = res['msg']
test_case.add_failure_info(message, dump)
else:
test_case.add_failure_info('rc=%s' % rc, dump)
elif host_data.status == 'skipped':
if 'skip_reason' in res:
message = res['skip_reason']
else:
message = 'skipped'
test_case.add_skipped_info(message)
return test_case
def _cleanse_string(self, value):
""" convert surrogate escapes to the unicode replacement character to avoid XML encoding errors """
return to_text(to_bytes(value, errors='surrogateescape'), errors='replace')
def _generate_report(self):
""" generate a TestSuite report from the collected TaskData and HostData """
test_cases = []
for task_uuid, task_data in self._task_data.items():
for host_uuid, host_data in task_data.host_data.items():
test_cases.append(self._build_test_case(task_data, host_data))
test_suite = TestSuite(self._playbook_name, test_cases)
report = TestSuite.to_xml_string([test_suite])
output_file = os.path.join(self._output_dir, '%s-%s.xml' % (self._playbook_name, time.time()))
with open(output_file, 'wb') as xml:
xml.write(to_bytes(report, errors='surrogate_or_strict'))
def v2_playbook_on_start(self, playbook):
self._playbook_path = playbook._file_name
self._playbook_name = os.path.splitext(os.path.basename(self._playbook_path))[0]
def v2_playbook_on_play_start(self, play):
self._play_name = play.get_name()
def v2_runner_on_no_hosts(self, task):
self._start_task(task)
def v2_playbook_on_task_start(self, task, is_conditional):
self._start_task(task)
def v2_playbook_on_cleanup_task_start(self, task):
self._start_task(task)
def v2_playbook_on_handler_task_start(self, task):
self._start_task(task)
def v2_runner_on_failed(self, result, ignore_errors=False):
if ignore_errors:
self._finish_task('ok', result)
else:
self._finish_task('failed', result)
def v2_runner_on_ok(self, result):
self._finish_task('ok', result)
def v2_runner_on_skipped(self, result):
self._finish_task('skipped', result)
def v2_playbook_on_include(self, included_file):
self._finish_task('included', included_file)
def v2_playbook_on_stats(self, stats):
self._generate_report()
class TaskData:
"""
Data about an individual task.
"""
def __init__(self, uuid, name, path, play):
self.uuid = uuid
self.name = name
self.path = path
self.play = play
self.start = None
self.host_data = OrderedDict()
self.start = time.time()
def add_host(self, host):
if host.uuid in self.host_data:
if host.status == 'included':
# concatenate task include output from multiple items
host.result = '%s\n%s' % (self.host_data[host.uuid].result, host.result)
else:
raise Exception('%s: %s: %s: duplicate host callback: %s' % (self.path, self.play, self.name, host.name))
self.host_data[host.uuid] = host
class HostData:
"""
Data about an individual host.
"""
def __init__(self, uuid, name, status, result):
self.uuid = uuid
self.name = name
self.status = status
self.result = result
self.finish = time.time()
| gpl-3.0 | -4,300,522,125,354,830,300 | 31.667883 | 121 | 0.594347 | false |
liggitt/docker-registry | depends/docker-registry-core/docker_registry/drivers/file.py | 38 | 4471 | # -*- coding: utf-8 -*-
# Copyright (c) 2014 Docker.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
docker_registry.drivers.file
~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a simple filesystem based driver.
"""
import os
import shutil
from ..core import driver
from ..core import exceptions
from ..core import lru
class Storage(driver.Base):
supports_bytes_range = True
def __init__(self, path=None, config=None):
self._root_path = path or './tmp'
def _init_path(self, path=None, create=False):
path = os.path.join(self._root_path, path) if path else self._root_path
if create is True:
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname)
return path
@lru.get
def get_content(self, path):
path = self._init_path(path)
try:
with open(path, mode='rb') as f:
d = f.read()
except Exception:
raise exceptions.FileNotFoundError('%s is not there' % path)
return d
@lru.set
def put_content(self, path, content):
path = self._init_path(path, create=True)
with open(path, mode='wb') as f:
f.write(content)
return path
def stream_read(self, path, bytes_range=None):
path = self._init_path(path)
nb_bytes = 0
total_size = 0
try:
with open(path, mode='rb') as f:
if bytes_range:
f.seek(bytes_range[0])
total_size = bytes_range[1] - bytes_range[0] + 1
while True:
buf = None
if bytes_range:
# Bytes Range is enabled
buf_size = self.buffer_size
if nb_bytes + buf_size > total_size:
# We make sure we don't read out of the range
buf_size = total_size - nb_bytes
if buf_size > 0:
buf = f.read(buf_size)
nb_bytes += len(buf)
else:
# We're at the end of the range
buf = ''
else:
buf = f.read(self.buffer_size)
if not buf:
break
yield buf
except IOError:
raise exceptions.FileNotFoundError('%s is not there' % path)
def stream_write(self, path, fp):
# Size is mandatory
path = self._init_path(path, create=True)
with open(path, mode='wb') as f:
try:
while True:
buf = fp.read(self.buffer_size)
if not buf:
break
f.write(buf)
except IOError:
pass
def list_directory(self, path=None):
prefix = ''
if path:
prefix = '%s/' % path
path = self._init_path(path)
exists = False
try:
for d in os.listdir(path):
exists = True
yield prefix + d
except Exception:
pass
if not exists:
raise exceptions.FileNotFoundError('%s is not there' % path)
def exists(self, path):
path = self._init_path(path)
return os.path.exists(path)
@lru.remove
def remove(self, path):
path = self._init_path(path)
if os.path.isdir(path):
shutil.rmtree(path)
return
try:
os.remove(path)
except OSError:
raise exceptions.FileNotFoundError('%s is not there' % path)
def get_size(self, path):
path = self._init_path(path)
try:
return os.path.getsize(path)
except OSError:
raise exceptions.FileNotFoundError('%s is not there' % path)
| apache-2.0 | 221,638,211,775,621,820 | 29.834483 | 79 | 0.514203 | false |
mhild/Sick-Beard | lib/hachoir_core/field/seekable_field_set.py | 90 | 6130 | from lib.hachoir_core.field import Field, BasicFieldSet, FakeArray, MissingField, ParserError
from lib.hachoir_core.tools import makeUnicode
from lib.hachoir_core.error import HACHOIR_ERRORS
from itertools import repeat
import lib.hachoir_core.config as config
class RootSeekableFieldSet(BasicFieldSet):
def __init__(self, parent, name, stream, description, size):
BasicFieldSet.__init__(self, parent, name, stream, description, size)
self._generator = self.createFields()
self._offset = 0
self._current_size = 0
if size:
self._current_max_size = size
else:
self._current_max_size = 0
self._field_dict = {}
self._field_array = []
def _feedOne(self):
assert self._generator
field = self._generator.next()
self._addField(field)
return field
def array(self, key):
return FakeArray(self, key)
def getFieldByAddress(self, address, feed=True):
for field in self._field_array:
if field.address <= address < field.address + field.size:
return field
for field in self._readFields():
if field.address <= address < field.address + field.size:
return field
return None
def _stopFeed(self):
self._size = self._current_max_size
self._generator = None
done = property(lambda self: not bool(self._generator))
def _getSize(self):
if self._size is None:
self._feedAll()
return self._size
size = property(_getSize)
def _getField(self, key, const):
field = Field._getField(self, key, const)
if field is not None:
return field
if key in self._field_dict:
return self._field_dict[key]
if self._generator and not const:
try:
while True:
field = self._feedOne()
if field.name == key:
return field
except StopIteration:
self._stopFeed()
except HACHOIR_ERRORS, err:
self.error("Error: %s" % makeUnicode(err))
self._stopFeed()
return None
def getField(self, key, const=True):
if isinstance(key, (int, long)):
if key < 0:
raise KeyError("Key must be positive!")
if not const:
self.readFirstFields(key+1)
if len(self._field_array) <= key:
raise MissingField(self, key)
return self._field_array[key]
return Field.getField(self, key, const)
def _addField(self, field):
if field._name.endswith("[]"):
self.setUniqueFieldName(field)
if config.debug:
self.info("[+] DBG: _addField(%s)" % field.name)
if field._address != self._offset:
self.warning("Set field %s address to %s (was %s)" % (
field.path, self._offset//8, field._address//8))
field._address = self._offset
assert field.name not in self._field_dict
self._checkFieldSize(field)
self._field_dict[field.name] = field
self._field_array.append(field)
self._current_size += field.size
self._offset += field.size
self._current_max_size = max(self._current_max_size, field.address + field.size)
def _checkAddress(self, address):
if self._size is not None:
max_addr = self._size
else:
# FIXME: Use parent size
max_addr = self.stream.size
return address < max_addr
def _checkFieldSize(self, field):
size = field.size
addr = field.address
if not self._checkAddress(addr+size-1):
raise ParserError("Unable to add %s: field is too large" % field.name)
def seekBit(self, address, relative=True):
if not relative:
address -= self.absolute_address
if address < 0:
raise ParserError("Seek below field set start (%s.%s)" % divmod(address, 8))
if not self._checkAddress(address):
raise ParserError("Seek above field set end (%s.%s)" % divmod(address, 8))
self._offset = address
return None
def seekByte(self, address, relative=True):
return self.seekBit(address*8, relative)
def readMoreFields(self, number):
return self._readMoreFields(xrange(number))
def _feedAll(self):
return self._readMoreFields(repeat(1))
def _readFields(self):
while True:
added = self._readMoreFields(xrange(1))
if not added:
break
yield self._field_array[-1]
def _readMoreFields(self, index_generator):
added = 0
if self._generator:
try:
for index in index_generator:
self._feedOne()
added += 1
except StopIteration:
self._stopFeed()
except HACHOIR_ERRORS, err:
self.error("Error: %s" % makeUnicode(err))
self._stopFeed()
return added
current_length = property(lambda self: len(self._field_array))
current_size = property(lambda self: self._offset)
def __iter__(self):
for field in self._field_array:
yield field
if self._generator:
try:
while True:
yield self._feedOne()
except StopIteration:
self._stopFeed()
raise StopIteration
def __len__(self):
if self._generator:
self._feedAll()
return len(self._field_array)
def nextFieldAddress(self):
return self._offset
def getFieldIndex(self, field):
return self._field_array.index(field)
class SeekableFieldSet(RootSeekableFieldSet):
def __init__(self, parent, name, description=None, size=None):
assert issubclass(parent.__class__, BasicFieldSet)
RootSeekableFieldSet.__init__(self, parent, name, parent.stream, description, size)
| gpl-3.0 | -5,938,486,074,494,937,000 | 32.681319 | 93 | 0.567537 | false |
tectronics/admiral-jiscmrd | test/MiscLib/tests/TestSuperGlobal.py | 8 | 1649 | # $Id: TestSuperGlobal.py 1047 2009-01-15 14:48:58Z graham $
#
# Unit testing for SuperGlobal module
# See http://pyunit.sourceforge.net/pyunit.html
#
import sys
import unittest
sys.path.append("../..")
from MiscLib.SuperGlobal import *
class TestSuperGlobal(unittest.TestCase):
def setUp(self):
return
def tearDown(self):
return
# Test cases
def testCreateSuperGlobal(self):
sg = SuperGlobal()
sg.i1 = 1
sg.s1 = "s1"
self.assertEqual(sg.i1, 1)
self.assertEqual(sg.s1, "s1")
def testRetrieveSuperGlobal(self):
self.testCreateSuperGlobal()
sg = SuperGlobal()
self.assertEqual(sg.i1, 1)
self.assertEqual(sg.s1, "s1")
def testUpdateSuperGlobal(self):
self.testCreateSuperGlobal()
sg = SuperGlobal()
sg.i2 = 2
sg.s2 = "s2"
self.assertEqual(sg.i1, 1)
self.assertEqual(sg.s1, "s1")
self.assertEqual(sg.i2, 2)
self.assertEqual(sg.s2, "s2")
sg2 = SuperGlobal()
sg2.i1 = 11
sg2.s1 = "s11"
self.assertEqual(sg.i1, 11)
self.assertEqual(sg.s1, "s11")
self.assertEqual(sg.i2, 2)
self.assertEqual(sg.s2, "s2")
# Code to run unit tests directly from command line.
def getTestSuite():
suite = unittest.TestSuite()
suite.addTest(TestSuperGlobal("testCreateSuperGlobal"))
suite.addTest(TestSuperGlobal("testRetrieveSuperGlobal"))
suite.addTest(TestSuperGlobal("testUpdateSuperGlobal"))
return suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
runner.run(getTestSuite())
# End.
| mit | 1,674,266,141,606,934,000 | 24.369231 | 61 | 0.625227 | false |
yavalvas/yav_com | build/matplotlib/examples/widgets/slider_demo.py | 13 | 1179 | import numpy as np
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button, RadioButtons
fig, ax = plt.subplots()
plt.subplots_adjust(left=0.25, bottom=0.25)
t = np.arange(0.0, 1.0, 0.001)
a0 = 5
f0 = 3
s = a0*np.sin(2*np.pi*f0*t)
l, = plt.plot(t,s, lw=2, color='red')
plt.axis([0, 1, -10, 10])
axcolor = 'lightgoldenrodyellow'
axfreq = plt.axes([0.25, 0.1, 0.65, 0.03], axisbg=axcolor)
axamp = plt.axes([0.25, 0.15, 0.65, 0.03], axisbg=axcolor)
sfreq = Slider(axfreq, 'Freq', 0.1, 30.0, valinit=f0)
samp = Slider(axamp, 'Amp', 0.1, 10.0, valinit=a0)
def update(val):
amp = samp.val
freq = sfreq.val
l.set_ydata(amp*np.sin(2*np.pi*freq*t))
fig.canvas.draw_idle()
sfreq.on_changed(update)
samp.on_changed(update)
resetax = plt.axes([0.8, 0.025, 0.1, 0.04])
button = Button(resetax, 'Reset', color=axcolor, hovercolor='0.975')
def reset(event):
sfreq.reset()
samp.reset()
button.on_clicked(reset)
rax = plt.axes([0.025, 0.5, 0.15, 0.15], axisbg=axcolor)
radio = RadioButtons(rax, ('red', 'blue', 'green'), active=0)
def colorfunc(label):
l.set_color(label)
fig.canvas.draw_idle()
radio.on_clicked(colorfunc)
plt.show()
| mit | -6,826,201,083,612,031,000 | 26.418605 | 68 | 0.660729 | false |
karmix/anaconda | tests/gui/inside/storage.py | 13 | 9863 | # Copyright (C) 2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Chris Lumens <[email protected]>
from dogtail.predicate import GenericPredicate
from dogtail.utils import doDelay
from . import UITestCase
class BasicStorageTestCase(UITestCase):
def check_select_disks(self, spoke):
# FIXME: This is a really roundabout way of determining whether a disk is
# selected or not. For some reason when a DiskOverview is selected, its icon
# loses the name "Hard Disk". For now, we can use this to check.
def _selected(do):
return len(do.findChildren(GenericPredicate(name="Hard Disk"))) == 0
        # A real disk is any disk attached to the VM that's not the special 10 MiB
# one used for communicating in and out of the VM. Therefore, scan all
# children of the DiskOverview looking for one whose name contains that
# size.
def _real_disk(do):
for child in do.findChildren(GenericPredicate()):
if child.name == "10 MiB":
return False
return True
# There should be some disks displayed on the screen.
overviews = spoke.findChildren(GenericPredicate(roleName="disk overview"))
self.assertGreater(len(overviews), 0, msg="No disks are displayed")
if len(overviews) == 1:
# Only one disk was given to this test case, so anaconda selected it
# by default. Verify. Not sure how this would happen with how
# testing is currently done, but it's good to be prepared.
self.assertTrue(_selected(overviews[0]))
else:
# More than one disk was provided, so anaconda did not select any.
# Let's select all disks and proceed.
for overview in filter(_real_disk, overviews):
self.assertFalse(_selected(overview))
overview.click()
self.assertTrue(_selected(overview))
def check_shopping_cart(self, spoke):
pass
def check_storage_options(self, spoke):
button = self.find("Automatically configure partitioning.", "radio button", node=spoke)
self.assertIsNotNone(button, msg="Autopart button not found")
self.assertTrue(button.checked, msg="Autopart should be selected")
button = self.find("I would like to make additional space available.", "check box", node=spoke)
self.assertIsNotNone(button, msg="Reclaim button not found")
self.assertFalse(button.checked, msg="Reclaim button should not be selected")
button = self.find("Encrypt my data.", "check box", node=spoke)
self.assertIsNotNone(button, msg="Encrypt button not found")
self.assertFalse(button.checked, msg="Encrypt button should not be selected")
def _common_run(self):
# First, we need to click on the storage spoke selector.
self.enter_spoke("INSTALLATION DESTINATION")
# Now verify we are on the right screen.
w = self.check_window_displayed("INSTALLATION DESTINATION")
self.check_help_button(w)
# Given that we attach a second disk to the system (for storing the test
# suite and results), anaconda will not select disks by default. Thus,
# the storage options panel should currently be insensitive.
area = self.find("Storage Options", node=w)
self.assertIsNotNone(area, "Storage Options not found")
self.assertFalse(area.sensitive, msg="Storage options should be insensitive")
# Select disk overviews. In the basic case, this means uninitialized
# disks that we're going to do autopart on.
self.check_select_disks(w)
# And now with disks selected, the storage options should be sensitive.
self.assertTrue(area.sensitive, msg="Storage options should be sensitive")
self.check_shopping_cart(w)
self.check_storage_options(w)
return w
def _run(self):
w = self._common_run()
# And then we click the Done button which should take the user right back to
# the hub. There's no need to display any other dialogs given that this is
# an install against empty disks and no other options were checked.
self.exit_spoke(node=w)
class BasicReclaimTestCase(BasicStorageTestCase):
def check_reclaim_buttons_before(self, dlg):
# Test initial sensitivity of widgets upon entering the reclaim dialog. A
# partition should be selected in the view, the only way out should be via
# the Cancel button, and the only operation available on the selected partition
# should be deleting.
button = self.find("Preserve", "push button", node=dlg)
self.assertIsNotNone(button, msg="Preserve button not found")
self.assertFalse(button.sensitive, msg="Preserve button should be insensitive")
button = self.find("Delete", "push button", node=dlg)
self.assertIsNotNone(button, msg="Delete button not found")
self.assertTrue(button.sensitive, msg="Delete button should be sensitive")
button = self.find("Shrink", "push button", node=dlg)
self.assertIsNotNone(button, msg="Shrink button not found")
self.assertFalse(button.sensitive, msg="Shrink button should be insensitive")
button = self.find("Delete all", "push button", node=dlg)
self.assertIsNotNone(button, msg="Delete all button not found")
self.assertTrue(button.sensitive, msg="Delete all button should be sensitive")
button = self.find("Cancel", "push button", node=dlg)
self.assertIsNotNone(button, msg="Cancel button not found")
self.assertTrue(button.sensitive, msg="Cancel button should be sensitive")
button = self.find("Reclaim space", "push button", node=dlg)
self.assertIsNotNone(button, msg="Reclaim button not found")
self.assertFalse(button.sensitive, msg="Reclaim button should be insensitive")
def check_reclaim_buttons_after(self, dlg):
# Test sensitivity of widgets now that enough space has been freed up on disks
# to continue. The Preserve buttons should be the only available operations,
# Delete all should have been renamed to Preserve All, and there should now be
# two ways out of the dialog.
button = self.find("Preserve", "push button", node=dlg)
self.assertIsNotNone(button, msg="Preserve button not found")
self.assertTrue(button.sensitive, msg="Preserve button should be sensitive")
button = self.find("Delete", "push button", node=dlg)
self.assertIsNotNone(button, msg="Delete button not found")
self.assertFalse(button.sensitive, msg="Delete button should be insensitive")
button = self.find("Shrink", "push button", node=dlg)
self.assertIsNotNone(button, msg="Shrink button not found")
self.assertFalse(button.sensitive, msg="Shrink button should be insensitive")
button = self.find("Preserve all", "push button", node=dlg)
self.assertIsNotNone(button, msg="Preserve all button not found")
self.assertTrue(button.sensitive, msg="Preserve all button should be sensitive")
button = self.find("Cancel", "push button", node=dlg)
self.assertIsNotNone(button, msg="Cancel button not found")
self.assertTrue(button.sensitive, msg="Cancel button should be sensitive")
button = self.find("Reclaim space", "push button", node=dlg)
self.assertIsNotNone(button, msg="Reclaim button not found")
self.assertTrue(button.sensitive, msg="Reclaim button should be sensitive")
def check_reclaim(self, optionsDlg):
self.click_button("Reclaim space", node=optionsDlg)
# Verify we are on the reclaim dialog.
reclaimDlg = self.check_dialog_displayed("Reclaim")
self.check_reclaim_buttons_before(reclaimDlg)
# Click the Delete all button to free up enough space.
self.click_button("Delete all", node=reclaimDlg)
self.check_reclaim_buttons_after(reclaimDlg)
# Click on Reclaim space, which should take us all the way back to the hub.
self.click_button("Reclaim space", node=reclaimDlg)
doDelay(5)
self.check_window_displayed("INSTALLATION SUMMARY")
def _run(self):
w = self._common_run()
# Clicking the Done button should bring up the installation options dialog
# indicating there's not currently enough space to install, but more space
# can be made by going to the reclaim dialog.
self.click_button("_Done", node=w)
optionsDlg = self.check_dialog_displayed("Need Space")
self.check_reclaim(optionsDlg)
class CantReclaimTestCase(BasicStorageTestCase):
def _run(self):
w = self._common_run()
# Clicking the Done button should bring up the installation options dialog
# indicating there's never going to be enough space to install. There's nothing
# to do now but quit.
self.click_button("_Done", node=w)
doDelay(5)
optionsDlg = self.check_dialog_displayed("No Space")
self.click_button("Quit installer", node=optionsDlg)
| gpl-2.0 | -3,789,481,931,969,705,500 | 47.348039 | 103 | 0.677988 | false |
rismalrv/edx-platform | common/lib/xmodule/xmodule/foldit_module.py | 68 | 6538 | import logging
from lxml import etree
from pkg_resources import resource_string
from xmodule.editing_module import EditingDescriptor
from xmodule.x_module import XModule
from xmodule.xml_module import XmlDescriptor
from xblock.fields import Scope, Integer, String
from .fields import Date
log = logging.getLogger(__name__)
class FolditFields(object):
# default to what Spring_7012x uses
required_level_half_credit = Integer(default=3, scope=Scope.settings)
required_sublevel_half_credit = Integer(default=5, scope=Scope.settings)
required_level = Integer(default=4, scope=Scope.settings)
required_sublevel = Integer(default=5, scope=Scope.settings)
due = Date(help="Date that this problem is due by", scope=Scope.settings)
show_basic_score = String(scope=Scope.settings, default='false')
show_leaderboard = String(scope=Scope.settings, default='false')
class FolditModule(FolditFields, XModule):
css = {'scss': [resource_string(__name__, 'css/foldit/leaderboard.scss')]}
def __init__(self, *args, **kwargs):
"""
Example:
<foldit show_basic_score="true"
required_level="4"
required_sublevel="3"
required_level_half_credit="2"
required_sublevel_half_credit="3"
show_leaderboard="false"/>
"""
super(FolditModule, self).__init__(*args, **kwargs)
self.due_time = self.due
def is_complete(self):
"""
Did the user get to the required level before the due date?
"""
# We normally don't want django dependencies in xmodule. foldit is
# special. Import this late to avoid errors with things not yet being
# initialized.
from foldit.models import PuzzleComplete
complete = PuzzleComplete.is_level_complete(
self.system.anonymous_student_id,
self.required_level,
self.required_sublevel,
self.due_time)
return complete
def is_half_complete(self):
"""
Did the user reach the required level for half credit?
Ideally this would be more flexible than just 0, 0.5, or 1 credit. On
the other hand, the xml attributes for specifying more specific
cut-offs and partial grades can get more confusing.
"""
from foldit.models import PuzzleComplete
complete = PuzzleComplete.is_level_complete(
self.system.anonymous_student_id,
self.required_level_half_credit,
self.required_sublevel_half_credit,
self.due_time)
return complete
def completed_puzzles(self):
"""
Return a list of puzzles that this user has completed, as an array of
dicts:
[ {'set': int,
'subset': int,
'created': datetime} ]
The list is sorted by set, then subset
"""
from foldit.models import PuzzleComplete
return sorted(
PuzzleComplete.completed_puzzles(self.system.anonymous_student_id),
key=lambda d: (d['set'], d['subset']))
def puzzle_leaders(self, n=10, courses=None):
"""
Returns a list of n pairs (user, score) corresponding to the top
scores; the pairs are in descending order of score.
"""
from foldit.models import Score
if courses is None:
courses = [self.location.course_key]
        leaders = [(leader['username'], leader['score']) for leader in Score.get_tops_n(n, course_list=courses)]
leaders.sort(key=lambda x: -x[1])
return leaders
def get_html(self):
"""
Render the html for the module.
"""
goal_level = '{0}-{1}'.format(
self.required_level,
self.required_sublevel)
showbasic = (self.show_basic_score.lower() == "true")
showleader = (self.show_leaderboard.lower() == "true")
context = {
'due': self.due,
'success': self.is_complete(),
'goal_level': goal_level,
'completed': self.completed_puzzles(),
'top_scores': self.puzzle_leaders(),
'show_basic': showbasic,
'show_leader': showleader,
'folditbasic': self.get_basicpuzzles_html(),
'folditchallenge': self.get_challenge_html()
}
return self.system.render_template('foldit.html', context)
def get_basicpuzzles_html(self):
"""
Render html for the basic puzzle section.
"""
goal_level = '{0}-{1}'.format(
self.required_level,
self.required_sublevel)
context = {
'due': self.due,
'success': self.is_complete(),
'goal_level': goal_level,
'completed': self.completed_puzzles(),
}
return self.system.render_template('folditbasic.html', context)
def get_challenge_html(self):
"""
Render html for challenge (i.e., the leaderboard)
"""
context = {
'top_scores': self.puzzle_leaders()}
return self.system.render_template('folditchallenge.html', context)
def get_score(self):
"""
0 if required_level_half_credit - required_sublevel_half_credit not
reached.
0.5 if required_level_half_credit and required_sublevel_half_credit
reached.
        1 if required_level and required_sublevel reached.
"""
if self.is_complete():
score = 1
elif self.is_half_complete():
score = 0.5
else:
score = 0
return {'score': score,
'total': self.max_score()}
def max_score(self):
return 1
class FolditDescriptor(FolditFields, XmlDescriptor, EditingDescriptor):
"""
Module for adding Foldit problems to courses
"""
mako_template = "widgets/html-edit.html"
module_class = FolditModule
filename_extension = "xml"
has_score = True
show_in_read_only_mode = True
js = {'coffee': [resource_string(__name__, 'js/src/html/edit.coffee')]}
js_module_name = "HTMLEditingDescriptor"
# The grade changes without any student interaction with the edx website,
# so always need to actually check.
always_recalculate_grades = True
@classmethod
def definition_from_xml(cls, xml_object, system):
return {}, []
def definition_to_xml(self, resource_fs):
xml_object = etree.Element('foldit')
return xml_object
| agpl-3.0 | 4,699,576,801,311,757,000 | 30.737864 | 113 | 0.604772 | false |
neumerance/deploy | .venv/lib/python2.7/site-packages/django/contrib/gis/gdal/srs.py | 219 | 11914 | """
The Spatial Reference class, represents OGR Spatial Reference objects.
Example:
>>> from django.contrib.gis.gdal import SpatialReference
>>> srs = SpatialReference('WGS84')
>>> print(srs)
GEOGCS["WGS 84",
DATUM["WGS_1984",
SPHEROID["WGS 84",6378137,298.257223563,
AUTHORITY["EPSG","7030"]],
TOWGS84[0,0,0,0,0,0,0],
AUTHORITY["EPSG","6326"]],
PRIMEM["Greenwich",0,
AUTHORITY["EPSG","8901"]],
UNIT["degree",0.01745329251994328,
AUTHORITY["EPSG","9122"]],
AUTHORITY["EPSG","4326"]]
>>> print(srs.proj)
+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
>>> print(srs.ellipsoid)
(6378137.0, 6356752.3142451793, 298.25722356300003)
>>> print(srs.projected, srs.geographic)
False True
>>> srs.import_epsg(32140)
>>> print(srs.name)
NAD83 / Texas South Central
"""
from ctypes import byref, c_char_p, c_int
# Getting the error checking routine and exceptions
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import SRSException
from django.contrib.gis.gdal.prototypes import srs as capi
from django.utils import six
from django.utils.encoding import force_bytes
#### Spatial Reference class. ####
class SpatialReference(GDALBase):
"""
A wrapper for the OGRSpatialReference object. According to the GDAL Web site,
the SpatialReference object "provide[s] services to represent coordinate
systems (projections and datums) and to transform between them."
"""
#### Python 'magic' routines ####
def __init__(self, srs_input=''):
"""
Creates a GDAL OSR Spatial Reference object from the given input.
        The input may be a string of OGC Well Known Text (WKT), an integer
EPSG code, a PROJ.4 string, and/or a projection "well known" shorthand
string (one of 'WGS84', 'WGS72', 'NAD27', 'NAD83').
"""
srs_type = 'user'
if isinstance(srs_input, six.string_types):
# Encoding to ASCII if unicode passed in.
if isinstance(srs_input, six.text_type):
srs_input = srs_input.encode('ascii')
try:
# If SRID is a string, e.g., '4326', then make acceptable
# as user input.
srid = int(srs_input)
srs_input = 'EPSG:%d' % srid
except ValueError:
pass
elif isinstance(srs_input, six.integer_types):
# EPSG integer code was input.
srs_type = 'epsg'
elif isinstance(srs_input, self.ptr_type):
srs = srs_input
srs_type = 'ogr'
else:
raise TypeError('Invalid SRS type "%s"' % srs_type)
if srs_type == 'ogr':
# Input is already an SRS pointer.
srs = srs_input
else:
# Creating a new SRS pointer, using the string buffer.
buf = c_char_p(b'')
srs = capi.new_srs(buf)
# If the pointer is NULL, throw an exception.
if not srs:
raise SRSException('Could not create spatial reference from: %s' % srs_input)
else:
self.ptr = srs
# Importing from either the user input string or an integer SRID.
if srs_type == 'user':
self.import_user_input(srs_input)
elif srs_type == 'epsg':
self.import_epsg(srs_input)
def __del__(self):
"Destroys this spatial reference."
if self._ptr: capi.release_srs(self._ptr)
def __getitem__(self, target):
"""
Returns the value of the given string attribute node, None if the node
doesn't exist. Can also take a tuple as a parameter, (target, child),
where child is the index of the attribute in the WKT. For example:
        >>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]]'
>>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326
>>> print(srs['GEOGCS'])
WGS 84
>>> print(srs['DATUM'])
WGS_1984
>>> print(srs['AUTHORITY'])
EPSG
>>> print(srs['AUTHORITY', 1]) # The authority value
4326
>>> print(srs['TOWGS84', 4]) # the fourth value in this wkt
0
        >>> print(srs['UNIT|AUTHORITY']) # For the units authority, have to use the pipe symbol.
EPSG
        >>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the units
9122
"""
if isinstance(target, tuple):
return self.attr_value(*target)
else:
return self.attr_value(target)
def __str__(self):
"The string representation uses 'pretty' WKT."
return self.pretty_wkt
#### SpatialReference Methods ####
def attr_value(self, target, index=0):
"""
The attribute value for the given target node (e.g. 'PROJCS'). The index
keyword specifies an index of the child node to return.
"""
if not isinstance(target, six.string_types) or not isinstance(index, int):
raise TypeError
return capi.get_attr_value(self.ptr, force_bytes(target), index)
def auth_name(self, target):
"Returns the authority name for the given string target node."
return capi.get_auth_name(self.ptr, force_bytes(target))
def auth_code(self, target):
"Returns the authority code for the given string target node."
return capi.get_auth_code(self.ptr, force_bytes(target))
def clone(self):
"Returns a clone of this SpatialReference object."
return SpatialReference(capi.clone_srs(self.ptr))
def from_esri(self):
"Morphs this SpatialReference from ESRI's format to EPSG."
capi.morph_from_esri(self.ptr)
def identify_epsg(self):
"""
This method inspects the WKT of this SpatialReference, and will
add EPSG authority nodes where an EPSG identifier is applicable.
"""
capi.identify_epsg(self.ptr)
def to_esri(self):
"Morphs this SpatialReference to ESRI's format."
capi.morph_to_esri(self.ptr)
def validate(self):
"Checks to see if the given spatial reference is valid."
capi.srs_validate(self.ptr)
#### Name & SRID properties ####
@property
def name(self):
"Returns the name of this Spatial Reference."
if self.projected: return self.attr_value('PROJCS')
elif self.geographic: return self.attr_value('GEOGCS')
elif self.local: return self.attr_value('LOCAL_CS')
else: return None
@property
def srid(self):
"Returns the SRID of top-level authority, or None if undefined."
try:
return int(self.attr_value('AUTHORITY', 1))
except (TypeError, ValueError):
return None
#### Unit Properties ####
@property
def linear_name(self):
"Returns the name of the linear units."
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
return name
@property
def linear_units(self):
"Returns the value of the linear units."
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
return units
@property
def angular_name(self):
"Returns the name of the angular units."
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
return name
@property
def angular_units(self):
"Returns the value of the angular units."
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
return units
@property
def units(self):
"""
Returns a 2-tuple of the units value and the units name,
        and will automatically determine whether to return the linear
or angular units.
"""
units, name = None, None
if self.projected or self.local:
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
elif self.geographic:
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
if name is not None:
            name = name.decode()
return (units, name)
#### Spheroid/Ellipsoid Properties ####
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening)
"""
return (self.semi_major, self.semi_minor, self.inverse_flattening)
@property
def semi_major(self):
"Returns the Semi Major Axis for this Spatial Reference."
return capi.semi_major(self.ptr, byref(c_int()))
@property
def semi_minor(self):
"Returns the Semi Minor Axis for this Spatial Reference."
return capi.semi_minor(self.ptr, byref(c_int()))
@property
def inverse_flattening(self):
"Returns the Inverse Flattening for this Spatial Reference."
return capi.invflattening(self.ptr, byref(c_int()))
#### Boolean Properties ####
@property
def geographic(self):
"""
Returns True if this SpatialReference is geographic
(root node is GEOGCS).
"""
return bool(capi.isgeographic(self.ptr))
@property
def local(self):
"Returns True if this SpatialReference is local (root node is LOCAL_CS)."
return bool(capi.islocal(self.ptr))
@property
def projected(self):
"""
Returns True if this SpatialReference is a projected coordinate system
(root node is PROJCS).
"""
return bool(capi.isprojected(self.ptr))
#### Import Routines #####
def import_epsg(self, epsg):
"Imports the Spatial Reference from the EPSG code (an integer)."
capi.from_epsg(self.ptr, epsg)
def import_proj(self, proj):
"Imports the Spatial Reference from a PROJ.4 string."
capi.from_proj(self.ptr, proj)
def import_user_input(self, user_input):
"Imports the Spatial Reference from the given user input string."
capi.from_user_input(self.ptr, force_bytes(user_input))
def import_wkt(self, wkt):
"Imports the Spatial Reference from OGC WKT (string)"
capi.from_wkt(self.ptr, byref(c_char_p(wkt)))
def import_xml(self, xml):
"Imports the Spatial Reference from an XML string."
capi.from_xml(self.ptr, xml)
#### Export Properties ####
@property
def wkt(self):
"Returns the WKT representation of this Spatial Reference."
return capi.to_wkt(self.ptr, byref(c_char_p()))
@property
def pretty_wkt(self, simplify=0):
"Returns the 'pretty' representation of the WKT."
return capi.to_pretty_wkt(self.ptr, byref(c_char_p()), simplify)
@property
def proj(self):
"Returns the PROJ.4 representation for this Spatial Reference."
return capi.to_proj(self.ptr, byref(c_char_p()))
@property
def proj4(self):
"Alias for proj()."
return self.proj
@property
def xml(self, dialect=''):
"Returns the XML representation of this Spatial Reference."
return capi.to_xml(self.ptr, byref(c_char_p()), dialect)
class CoordTransform(GDALBase):
"The coordinate system transformation object."
def __init__(self, source, target):
"Initializes on a source and target SpatialReference objects."
if not isinstance(source, SpatialReference) or not isinstance(target, SpatialReference):
raise TypeError('source and target must be of type SpatialReference')
self.ptr = capi.new_ct(source._ptr, target._ptr)
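        # Keep the source/target names so __str__ can describe the transform.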
self._srs1_name = source.name
self._srs2_name = target.name
def __del__(self):
"Deletes this Coordinate Transformation object."
if self._ptr: capi.destroy_ct(self._ptr)
def __str__(self):
return 'Transform from "%s" to "%s"' % (self._srs1_name, self._srs2_name)
| apache-2.0 | 3,991,573,467,598,780,400 | 33.836257 | 97 | 0.609619 | false |
rambo/asylum | project/members/models.py | 4 | 6401 | # -*- coding: utf-8 -*-
import random
from decimal import Decimal
from access.models import AccessType
from access.utils import resolve_acl
from django.core.exceptions import ValidationError
from django.db import models, transaction
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django_markdown.models import MarkdownField
from reversion import revisions
from asylum.models import AsylumModel
from .handlers import call_saves, get_handler_instance
def generate_unique_randomid():
"""Generate pseudorandom ids until a free one is found"""
candidate = "0x%x" % random.randint(1, 2 ** 32)
while Member.objects.filter(anonymized_id=candidate).count():
candidate = "0x%x" % random.randint(1, 2 ** 32)
return candidate
def generate_unique_memberid():
"""Gives the next highest member id"""
try:
highest_member = Member.objects.exclude(member_id=None).order_by('-member_id')[0]
highest = highest_member.member_id
except IndexError:
highest = 0
candidate = highest + 1
while Member.objects.filter(member_id=candidate).count():
candidate += 1
return candidate
class MemberCommon(AsylumModel):
fname = models.CharField(_("First name"), max_length=200, blank=False)
lname = models.CharField(_("Last name"), max_length=200, blank=False)
city = models.CharField(_("City of residence"), max_length=200, blank=False)
email = models.EmailField(_("Email address"), unique=True, blank=False)
phone = models.CharField(_("Phone number"), max_length=200, blank=True)
nick = models.CharField(_("Nickname"), max_length=200, blank=True)
def __str__(self):
return '"%s, %s" <%s>' % (self.lname, self.fname, self.email)
@property
def name(self):
return "%s %s" % (self.fname, self.lname)
@property
def rname(self):
return "%s, %s" % (self.lname, self.fname)
@property
def access_acl(self):
return resolve_acl(AccessType.objects.filter(pk__in=self.access_granted.select_related('atype').values_list('atype', flat=True)))
class Meta:
abstract = True
ordering = ['lname', 'fname']
class MemberType(AsylumModel):
label = models.CharField(_("Label"), max_length=200, blank=False)
def __str__(self):
return self.label
class Meta:
verbose_name = _('Member Type')
verbose_name_plural = _('Member Types')
revisions.default_revision_manager.register(MemberType)
class Member(MemberCommon):
accepted = models.DateField(_("Date accepted"), default=timezone.now)
mtypes = models.ManyToManyField(MemberType, related_name='+', verbose_name=_("Membership types"), blank=True)
anonymized_id = models.CharField(_("Anonymized id (for use in external databases)"), max_length=24, unique=True, blank=True, null=True, default=generate_unique_randomid)
member_id = models.PositiveIntegerField(_("Member id no"), blank=True, null=True, unique=True, default=generate_unique_memberid)
@property
def credit(self):
ret = self.creditor_transactions.all().aggregate(models.Sum('amount'))['amount__sum']
        if ret is None:
return Decimal(0.0)
return ret
@call_saves('MEMBER_CALLBACKS_HANDLER')
def save(self, *args, **kwargs):
return super().save(*args, **kwargs)
class Meta:
verbose_name = _('Member')
verbose_name_plural = _('Members')
revisions.default_revision_manager.register(Member)
class MembershipApplicationTag(AsylumModel):
label = models.CharField(_("Label"), max_length=200, blank=False)
def __str__(self):
return self.label
class Meta:
verbose_name = _('Membership Application Tag')
verbose_name_plural = _('Membership Application Tags')
revisions.default_revision_manager.register(MembershipApplicationTag)
class MembershipApplication(MemberCommon):
received = models.DateField(default=timezone.now)
tags = models.ManyToManyField(MembershipApplicationTag, related_name='+', verbose_name=_("Application tags"), blank=True)
notes = MarkdownField(verbose_name=_("Notes"), blank=True)
@call_saves('MEMBERAPPLICATION_CALLBACKS_HANDLER')
def save(self, *args, **kwargs):
return super().save(*args, **kwargs)
def validate_unique(self, exclude=None):
if exclude and 'email' in exclude:
return super().validate_unique(exclude)
if Member.objects.filter(email=self.email).count():
# TODO: Figure out the exact format the default form validators use and use that ?
raise ValidationError({'email': ValidationError(_('Member with this email already exists'), code='unique')})
return super().validate_unique(exclude)
def approve(self, set_mtypes):
h = get_handler_instance('MEMBERAPPLICATION_CALLBACKS_HANDLER')
with transaction.atomic(), revisions.create_revision():
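            # Copy the applicant's details onto a new Member inside one revisioned transaction.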
m = Member()
m.fname = self.fname
m.lname = self.lname
m.city = self.city
m.email = self.email
m.phone = self.phone
m.nick = self.nick
if h:
h.on_approving(self, m)
m.save()
if set_mtypes:
m.mtypes = set_mtypes
m.save()
if self.notes:
n = MemberNote()
n.notes = self.notes
n.member = m
n.save()
if h:
h.on_approved(self, m)
self.delete()
class Meta:
verbose_name = _('Membership Application')
verbose_name_plural = _('Membership Applications')
revisions.default_revision_manager.register(MembershipApplication)
class MemberNote(AsylumModel):
stamp = models.DateTimeField(_("Datetime"), default=timezone.now, db_index=True)
notes = MarkdownField(verbose_name=_("Notes"), blank=False)
member = models.ForeignKey(Member, verbose_name=_("Member"), blank=True, null=True, on_delete=models.CASCADE, related_name='notes')
class Meta:
verbose_name = _('Note')
verbose_name_plural = _('Notes')
ordering = ['member__lname', 'member__fname', '-stamp']
def __str__(self):
return _("Notes about %s on %s") % (self.member, self.stamp)
revisions.default_revision_manager.register(MemberNote)
| mit | -3,186,799,878,011,729,000 | 33.6 | 173 | 0.648961 | false |
MoisesTedeschi/python | Scripts-Python/Modulos-Diversos/python-com-scrapy/Lib/site-packages/pip/_vendor/progress/helpers.py | 24 | 2952 | # Copyright (c) 2012 Giorgos Verigakis <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import print_function
HIDE_CURSOR = '\x1b[?25l'
SHOW_CURSOR = '\x1b[?25h'
class WriteMixin(object):
hide_cursor = False
def __init__(self, message=None, **kwargs):
super(WriteMixin, self).__init__(**kwargs)
self._width = 0
if message:
self.message = message
if self.file and self.file.isatty():
if self.hide_cursor:
print(HIDE_CURSOR, end='', file=self.file)
print(self.message, end='', file=self.file)
self.file.flush()
def write(self, s):
if self.file and self.file.isatty():
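            # Backspace over the previous message, then overwrite it padded to the widest message so far.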
b = '\b' * self._width
c = s.ljust(self._width)
print(b + c, end='', file=self.file)
self._width = max(self._width, len(s))
self.file.flush()
def finish(self):
if self.file and self.file.isatty() and self.hide_cursor:
print(SHOW_CURSOR, end='', file=self.file)
class WritelnMixin(object):
hide_cursor = False
def __init__(self, message=None, **kwargs):
super(WritelnMixin, self).__init__(**kwargs)
if message:
self.message = message
if self.file and self.file.isatty() and self.hide_cursor:
print(HIDE_CURSOR, end='', file=self.file)
def clearln(self):
if self.file and self.file.isatty():
print('\r\x1b[K', end='', file=self.file)
def writeln(self, line):
if self.file and self.file.isatty():
self.clearln()
print(line, end='', file=self.file)
self.file.flush()
def finish(self):
if self.file and self.file.isatty():
print(file=self.file)
if self.hide_cursor:
print(SHOW_CURSOR, end='', file=self.file)
from signal import signal, SIGINT
from sys import exit
class SigIntMixin(object):
"""Registers a signal handler that calls finish on SIGINT"""
def __init__(self, *args, **kwargs):
super(SigIntMixin, self).__init__(*args, **kwargs)
signal(SIGINT, self._sigint_handler)
def _sigint_handler(self, signum, frame):
self.finish()
exit(0)
| gpl-3.0 | -3,875,304,096,176,934,400 | 31.43956 | 74 | 0.621951 | false |
SebDieBln/QGIS | python/plugins/processing/algs/taudem/dinfdistdown_multi.py | 12 | 5393 | # -*- coding: utf-8 -*-
"""
***************************************************************************
dinfdistdown_multi.py
---------------------
Date : March 2015
Copyright : (C) 2015 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'March 2015'
__copyright__ = '(C) 2015, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4.QtGui import QIcon
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.GeoAlgorithmExecutionException import \
GeoAlgorithmExecutionException
from processing.core.parameters import ParameterFile
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterSelection
from processing.core.outputs import OutputDirectory
from TauDEMUtils import TauDEMUtils
class DinfDistDownMulti(GeoAlgorithm):
DINF_FLOW_DIR_GRID = 'DINF_FLOW_DIR_GRID'
PIT_FILLED_GRID = 'PIT_FILLED_GRID'
STREAM_GRID = 'STREAM_GRID'
WEIGHT_PATH_GRID = 'WEIGHT_PATH_GRID'
STAT_METHOD = 'STAT_METHOD'
DIST_METHOD = 'DIST_METHOD'
EDGE_CONTAM = 'EDGE_CONTAM'
DIST_DOWN_GRID = 'DIST_DOWN_GRID'
STATISTICS = ['Minimum', 'Maximum', 'Average']
STAT_DICT = {0: 'min', 1: 'max', 2: 'ave'}
DISTANCE = ['Pythagoras', 'Horizontal', 'Vertical', 'Surface']
DIST_DICT = {
0: 'p',
1: 'h',
2: 'v',
3: 's',
}
def getIcon(self):
return QIcon(os.path.dirname(__file__) + '/../../images/taudem.png')
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('D-Infinity Distance Down (multifile)')
self.cmdName = 'dinfdistdown'
self.group, self.i18n_group = self.trAlgorithm('Specialized Grid Analysis tools')
self.addParameter(ParameterFile(self.DINF_FLOW_DIR_GRID,
self.tr('D-Infinity Flow Direction Grid'), True, False))
self.addParameter(ParameterFile(self.PIT_FILLED_GRID,
self.tr('Pit Filled Elevation Grid'), True, False))
self.addParameter(ParameterFile(self.STREAM_GRID,
self.tr('Stream Raster Grid'), True, False))
self.addParameter(ParameterFile(self.WEIGHT_PATH_GRID,
self.tr('Weight Path Grid'), True, True))
self.addParameter(ParameterSelection(self.STAT_METHOD,
self.tr('Statistical Method'), self.STATISTICS, 2))
self.addParameter(ParameterSelection(self.DIST_METHOD,
self.tr('Distance Method'), self.DISTANCE, 1))
self.addParameter(ParameterBoolean(self.EDGE_CONTAM,
self.tr('Check for edge contamination'), True))
self.addOutput(OutputDirectory(self.DIST_DOWN_GRID,
self.tr('D-Infinity Drop to Stream Grid')))
def processAlgorithm(self, progress):
commands = []
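        # Build the mpiexec command line that wraps the multifile TauDEM executable.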
commands.append(os.path.join(TauDEMUtils.mpiexecPath(), 'mpiexec'))
processNum = ProcessingConfig.getSetting(TauDEMUtils.MPI_PROCESSES)
if processNum <= 0:
raise GeoAlgorithmExecutionException(
self.tr('Wrong number of MPI processes used. Please set '
'correct number before running TauDEM algorithms.'))
commands.append('-n')
commands.append(unicode(processNum))
commands.append(os.path.join(TauDEMUtils.taudemMultifilePath(), self.cmdName))
commands.append('-ang')
commands.append(self.getParameterValue(self.DINF_FLOW_DIR_GRID))
commands.append('-fel')
commands.append(self.getParameterValue(self.PIT_FILLED_GRID))
commands.append('-src')
commands.append(self.getParameterValue(self.STREAM_GRID))
wg = self.getParameterValue(self.WEIGHT_PATH_GRID)
if wg is not None:
commands.append('-wg')
commands.append(self.getParameterValue(self.WEIGHT_PATH_GRID))
commands.append('-m')
commands.append(unicode(self.STAT_DICT[self.getParameterValue(
self.STAT_METHOD)]))
commands.append(unicode(self.DIST_DICT[self.getParameterValue(
self.DIST_METHOD)]))
if not self.getParameterValue(self.EDGE_CONTAM):
commands.append('-nc')
commands.append('-dd')
commands.append(self.getOutputValue(self.DIST_DOWN_GRID))
TauDEMUtils.executeTauDEM(commands, progress)
| gpl-2.0 | -378,857,323,110,243,000 | 41.464567 | 96 | 0.578528 | false |
SGCreations/Flask | Work/TriviaMVA/TriviaMVA/env/Lib/site-packages/pip/_vendor/requests/packages/chardet/euctwprober.py | 2994 | 1676 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCTWDistributionAnalysis
from .mbcssm import EUCTWSMModel
class EUCTWProber(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
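        # Pair the EUC-TW coding state machine with its character distribution analyzer.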
self._mCodingSM = CodingStateMachine(EUCTWSMModel)
self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
self.reset()
def get_charset_name(self):
return "EUC-TW"
| apache-2.0 | -6,391,776,157,106,891,000 | 39.878049 | 69 | 0.716587 | false |
shsingh/ansible | test/units/modules/network/fortios/test_fortios_vpn_ssl_web_user_group_bookmark.py | 21 | 7790 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_vpn_ssl_web_user_group_bookmark
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_vpn_ssl_web_user_group_bookmark.Connection')
return connection_class_mock
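# Module-level handler instance shared by the tests below.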
fos_instance = FortiOSHandler(connection_mock)
def test_vpn_ssl_web_user_group_bookmark_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'vpn_ssl_web_user_group_bookmark': {'name': 'default_name_3'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ssl_web_user_group_bookmark.fortios_vpn_ssl_web(input_data, fos_instance)
expected_data = {'name': 'default_name_3'
}
set_method_mock.assert_called_with('vpn.ssl.web', 'user-group-bookmark', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_vpn_ssl_web_user_group_bookmark_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'vpn_ssl_web_user_group_bookmark': {'name': 'default_name_3'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ssl_web_user_group_bookmark.fortios_vpn_ssl_web(input_data, fos_instance)
expected_data = {'name': 'default_name_3'
}
set_method_mock.assert_called_with('vpn.ssl.web', 'user-group-bookmark', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_vpn_ssl_web_user_group_bookmark_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'vpn_ssl_web_user_group_bookmark': {'name': 'default_name_3'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ssl_web_user_group_bookmark.fortios_vpn_ssl_web(input_data, fos_instance)
delete_method_mock.assert_called_with('vpn.ssl.web', 'user-group-bookmark', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_vpn_ssl_web_user_group_bookmark_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'vpn_ssl_web_user_group_bookmark': {'name': 'default_name_3'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ssl_web_user_group_bookmark.fortios_vpn_ssl_web(input_data, fos_instance)
delete_method_mock.assert_called_with('vpn.ssl.web', 'user-group-bookmark', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_vpn_ssl_web_user_group_bookmark_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'vpn_ssl_web_user_group_bookmark': {'name': 'default_name_3'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ssl_web_user_group_bookmark.fortios_vpn_ssl_web(input_data, fos_instance)
expected_data = {'name': 'default_name_3'
}
set_method_mock.assert_called_with('vpn.ssl.web', 'user-group-bookmark', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_vpn_ssl_web_user_group_bookmark_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'vpn_ssl_web_user_group_bookmark': {
'random_attribute_not_valid': 'tag', 'name': 'default_name_3'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ssl_web_user_group_bookmark.fortios_vpn_ssl_web(input_data, fos_instance)
expected_data = {'name': 'default_name_3'
}
set_method_mock.assert_called_with('vpn.ssl.web', 'user-group-bookmark', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| gpl-3.0 | -843,241,034,057,633,400 | 40.216931 | 142 | 0.66932 | false |
infobloxopen/infoblox-netmri | infoblox_netmri/api/broker/v2_3_0/issue_adhoc_broker.py | 19 | 1697 | from ..broker import Broker
class IssueAdhocBroker(Broker):
controller = "issue_adhocs"
def generate_issue(self, **kwargs):
"""Generates an instance of a custom issue.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param DeviceID: The ID of the Device associated with the issue to be generated.
:type DeviceID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param IssueTypeID: ID of the custom issue to be generated.
:type IssueTypeID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param Severity: Severity of the issue that will be generated (Error, Warning, Info)
:type Severity: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param BatchID: The batch id associated with the issue.
:type BatchID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return IssueID: The IssueID designated to the generated custom issue
:rtype IssueID: String
"""
return self.api_request(self._get_method_fullname("generate_issue"), kwargs)
| apache-2.0 | 2,321,143,481,276,814,000 | 29.303571 | 97 | 0.507955 | false |
jinnykoo/wuyisj.com | src/oscar/apps/basket/forms.py | 26 | 10178 | from django import forms
from django.conf import settings
from django.forms.models import modelformset_factory, BaseModelFormSet
from django.db.models import Sum
from django.utils.translation import ugettext_lazy as _
from oscar.core.loading import get_model
from oscar.forms import widgets
Line = get_model('basket', 'line')
Basket = get_model('basket', 'basket')
Product = get_model('catalogue', 'product')
class BasketLineForm(forms.ModelForm):
save_for_later = forms.BooleanField(
initial=False, required=False, label=_('Save for Later'))
def __init__(self, strategy, *args, **kwargs):
super(BasketLineForm, self).__init__(*args, **kwargs)
self.instance.strategy = strategy
def clean_quantity(self):
qty = self.cleaned_data['quantity']
if qty > 0:
self.check_max_allowed_quantity(qty)
self.check_permission(qty)
return qty
def check_max_allowed_quantity(self, qty):
is_allowed, reason = self.instance.basket.is_quantity_allowed(qty)
if not is_allowed:
raise forms.ValidationError(reason)
def check_permission(self, qty):
policy = self.instance.purchase_info.availability
is_available, reason = policy.is_purchase_permitted(
quantity=qty)
if not is_available:
raise forms.ValidationError(reason)
class Meta:
model = Line
fields = ['quantity']
class BaseBasketLineFormSet(BaseModelFormSet):
def __init__(self, strategy, *args, **kwargs):
self.strategy = strategy
super(BaseBasketLineFormSet, self).__init__(*args, **kwargs)
def _construct_form(self, i, **kwargs):
return super(BaseBasketLineFormSet, self)._construct_form(
i, strategy=self.strategy, **kwargs)
def _should_delete_form(self, form):
"""
Quantity of zero is treated as if the user checked the DELETE checkbox,
which results in the basket line being deleted
"""
if super(BaseBasketLineFormSet, self)._should_delete_form(form):
return True
if self.can_delete and 'quantity' in form.cleaned_data:
return form.cleaned_data['quantity'] == 0
BasketLineFormSet = modelformset_factory(
Line, form=BasketLineForm, formset=BaseBasketLineFormSet, extra=0,
can_delete=True)
class SavedLineForm(forms.ModelForm):
move_to_basket = forms.BooleanField(initial=False, required=False,
label=_('Move to Basket'))
class Meta:
model = Line
fields = ('id', 'move_to_basket')
def __init__(self, strategy, basket, *args, **kwargs):
self.strategy = strategy
self.basket = basket
super(SavedLineForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super(SavedLineForm, self).clean()
if not cleaned_data['move_to_basket']:
# skip further validation (see issue #666)
return cleaned_data
# Get total quantity of all lines with this product (there's normally
# only one but there can be more if you allow product options).
lines = self.basket.lines.filter(product=self.instance.product)
current_qty = lines.aggregate(Sum('quantity'))['quantity__sum'] or 0
desired_qty = current_qty + self.instance.quantity
result = self.strategy.fetch_for_product(self.instance.product)
is_available, reason = result.availability.is_purchase_permitted(
quantity=desired_qty)
if not is_available:
raise forms.ValidationError(reason)
return cleaned_data
class BaseSavedLineFormSet(BaseModelFormSet):
def __init__(self, strategy, basket, *args, **kwargs):
self.strategy = strategy
self.basket = basket
super(BaseSavedLineFormSet, self).__init__(*args, **kwargs)
def _construct_form(self, i, **kwargs):
return super(BaseSavedLineFormSet, self)._construct_form(
i, strategy=self.strategy, basket=self.basket, **kwargs)
SavedLineFormSet = modelformset_factory(Line, form=SavedLineForm,
formset=BaseSavedLineFormSet, extra=0,
can_delete=True)
class BasketVoucherForm(forms.Form):
code = forms.CharField(max_length=128, label=_('Code'))
def __init__(self, *args, **kwargs):
super(BasketVoucherForm, self).__init__(*args, **kwargs)
def clean_code(self):
return self.cleaned_data['code'].strip().upper()
class AddToBasketForm(forms.Form):
quantity = forms.IntegerField(initial=1, min_value=1, label=_('Quantity'))
def __init__(self, basket, product, *args, **kwargs):
# Note, the product passed in here isn't necessarily the product being
# added to the basket. For child products, it is the *parent* product
# that gets passed to the form. An optional product_id param is passed
# to indicate the ID of the child product being added to the basket.
self.basket = basket
self.parent_product = product
super(AddToBasketForm, self).__init__(*args, **kwargs)
# Dynamically build fields
if product.is_parent:
self._create_parent_product_fields(product)
self._create_product_fields(product)
# Dynamic form building methods
def _create_parent_product_fields(self, product):
"""
        Adds the fields for a "group"-type product (e.g., a parent product with
        a list of children).
        Currently requires that a stock record exists for the children.
"""
choices = []
disabled_values = []
for child in product.children.all():
# Build a description of the child, including any pertinent
# attributes
attr_summary = child.attribute_summary
if attr_summary:
summary = attr_summary
else:
summary = child.get_title()
# Check if it is available to buy
info = self.basket.strategy.fetch_for_product(child)
if not info.availability.is_available_to_buy:
disabled_values.append(child.id)
choices.append((child.id, summary))
self.fields['child_id'] = forms.ChoiceField(
choices=tuple(choices), label=_("Variant"),
widget=widgets.AdvancedSelect(disabled_values=disabled_values))
def _create_product_fields(self, product):
"""
Add the product option fields.
"""
for option in product.options:
self._add_option_field(product, option)
def _add_option_field(self, product, option):
"""
Creates the appropriate form field for the product option.
This is designed to be overridden so that specific widgets can be used
for certain types of options.
"""
kwargs = {'required': option.is_required}
self.fields[option.code] = forms.CharField(**kwargs)
# Cleaning
def clean_child_id(self):
try:
child = self.parent_product.children.get(
id=self.cleaned_data['child_id'])
except Product.DoesNotExist:
raise forms.ValidationError(
_("Please select a valid product"))
# To avoid duplicate SQL queries, we cache a copy of the loaded child
# product as we're going to need it later.
self.child_product = child
return self.cleaned_data['child_id']
def clean_quantity(self):
# Check that the proposed new line quantity is sensible
qty = self.cleaned_data['quantity']
basket_threshold = settings.OSCAR_MAX_BASKET_QUANTITY_THRESHOLD
if basket_threshold:
total_basket_quantity = self.basket.num_items
max_allowed = basket_threshold - total_basket_quantity
if qty > max_allowed:
raise forms.ValidationError(
_("Due to technical limitations we are not able to ship"
" more than %(threshold)d items in one order. Your"
" basket currently has %(basket)d items.")
% {'threshold': basket_threshold,
'basket': total_basket_quantity})
return qty
@property
def product(self):
"""
The actual product being added to the basket
"""
# Note, the child product attribute is saved in the clean_child_id
# method
return getattr(self, 'child_product', self.parent_product)
def clean(self):
info = self.basket.strategy.fetch_for_product(self.product)
# Check currencies are sensible
if (self.basket.currency and
info.price.currency != self.basket.currency):
raise forms.ValidationError(
_("This product cannot be added to the basket as its currency "
"isn't the same as other products in your basket"))
# Check user has permission to add the desired quantity to their
# basket.
current_qty = self.basket.product_quantity(self.product)
desired_qty = current_qty + self.cleaned_data.get('quantity', 1)
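        # The availability policy checks the total quantity the user would end up with, not just the amount being added.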
is_permitted, reason = info.availability.is_purchase_permitted(
desired_qty)
if not is_permitted:
raise forms.ValidationError(reason)
return self.cleaned_data
# Helpers
def cleaned_options(self):
"""
Return submitted options in a clean format
"""
options = []
for option in self.parent_product.options:
if option.code in self.cleaned_data:
options.append({
'option': option,
'value': self.cleaned_data[option.code]})
return options
class SimpleAddToBasketForm(AddToBasketForm):
"""
Simplified version of the add to basket form where the quantity is
defaulted to 1 and rendered in a hidden widget
"""
quantity = forms.IntegerField(
initial=1, min_value=1, widget=forms.HiddenInput, label=_('Quantity'))
| bsd-3-clause | -7,666,253,843,431,022,000 | 35.092199 | 79 | 0.620358 | false |
OXPHOS/shogun | examples/undocumented/python/streaming_vw_createcache.py | 5 | 1432 | #!/usr/bin/env python
from shogun import StreamingVwFile
from shogun import StreamingVwCacheFile
from shogun import T_SVMLIGHT
from shogun import StreamingVwFeatures
from shogun import VowpalWabbit
parameter_list=[['../data/fm_train_sparsereal.dat']]
def streaming_vw_createcache(fname):
# First creates a binary cache from an ascii data file.
# and then trains using the StreamingVwCacheFile as input
# Open the input file as a StreamingVwFile
input_file = StreamingVwFile(fname)
# Default file name will be vw_cache.dat.cache
input_file.set_write_to_cache(True)
# Tell VW that the file is in SVMLight format
# Supported types are T_DENSE, T_SVMLIGHT and T_VW
input_file.set_parser_type(T_SVMLIGHT)
## Create a StreamingVwFeatures object, `True' indicating the examples are labelled
#features = StreamingVwFeatures(input_file, True, 1024)
## Create a VW object from the features
#vw = VowpalWabbit(features)
#vw.set_no_training(True)
## Train (in this case does nothing but run over all examples)
#vw.train()
##Finally Train using the generated cache file
## Open the input cache file as a StreamingVwCacheFile
#input_file = StreamingVwCacheFile("vw_cache.dat.cache");
## The rest is exactly as for normal input
#features = StreamingVwFeatures(input_file, True, 1024);
#vw = VowpalWabbit(features)
#vw.train()
##return vw
if __name__ == "__main__":
streaming_vw_createcache(*parameter_list[0])
| gpl-3.0 | 9,087,873,785,597,811,000 | 30.822222 | 84 | 0.754888 | false |
dagwieers/ansible | lib/ansible/modules/web_infrastructure/sophos_utm/utm_proxy_frontend.py | 36 | 8794 | #!/usr/bin/python
# Copyright: (c) 2018, Johannes Brunswicker <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = """
---
module: utm_proxy_frontend
author:
- Johannes Brunswicker (@MatrixCrawler)
short_description: create, update or destroy reverse_proxy frontend entry in Sophos UTM
description:
- Create, update or destroy a reverse_proxy frontend entry in Sophos UTM.
- This module needs to have the REST Ability of the UTM to be activated.
version_added: "2.8"
options:
name:
description:
- The name of the object. Will be used to identify the entry
required: true
    add_content_type_header:
description:
- Whether to add the content type header or not
type: bool
default: False
address:
description:
- The reference name of the network/interface_address object.
default: REF_DefaultInternalAddress
allowed_networks:
description:
- A list of reference names for the allowed networks.
default: ['REF_NetworkAny']
certificate:
description:
- The reference name of the ca/host_key_cert object.
default: ""
comment:
description:
- An optional comment to add to the object
default: ""
disable_compression:
description:
- Whether to enable the compression
type: bool
default: False
domain:
description:
- A list of domain names for the frontend object
exceptions:
description:
- A list of exception ref names (reverse_proxy/exception)
default: []
htmlrewrite:
description:
- Whether to enable html rewrite or not
type: bool
default: False
htmlrewrite_cookies:
description:
- Whether to enable html rewrite cookie or not
type: bool
default: False
implicitredirect:
description:
- Whether to enable implicit redirection or not
type: bool
default: False
lbmethod:
description:
- Which loadbalancer method should be used
choices:
- ""
- bybusyness
- bytraffic
- byrequests
default: bybusyness
locations:
description:
- A list of location ref names (reverse_proxy/location)
default: []
port:
description:
- The frontend http port
default: 80
preservehost:
description:
- Whether to preserve host header
type: bool
default: False
profile:
description:
- The reference string of the reverse_proxy/profile
default: ""
status:
description:
- Whether to activate the frontend entry or not
type: bool
default: True
type:
description:
- Which protocol should be used
choices:
- http
- https
default: http
xheaders:
description:
- Whether to pass the host header or not
type: bool
default: False
extends_documentation_fragment:
- utm
"""
EXAMPLES = """
- name: Create utm proxy_frontend
utm_proxy_frontend:
utm_host: sophos.host.name
utm_token: abcdefghijklmno1234
name: TestFrontendEntry
host: REF_OBJECT_STRING
state: present
- name: Remove utm proxy_frontend
utm_proxy_frontend:
utm_host: sophos.host.name
utm_token: abcdefghijklmno1234
name: TestFrontendEntry
state: absent
"""
RETURN = """
result:
description: The utm object that was created
returned: success
type: complex
contains:
_ref:
description: The reference name of the object
type: string
_locked:
description: Whether or not the object is currently locked
type: boolean
_type:
description: The type of the object
type: string
name:
description: The name of the object
type: string
add_content_type_header:
description: Whether to add the content type header
type: bool
address:
description: The reference name of the address
type: string
allowed_networks:
description: List of reference names of networks associated
type: list
certificate:
description: Reference name of certificate (ca/host_key_cert)
type: string
comment:
description: The comment string
type: string
disable_compression:
description: State of compression support
type: bool
domain:
description: List of hostnames
type: list
exceptions:
description: List of associated proxy exceptions
type: list
htmlrewrite:
description: State of html rewrite
type: bool
htmlrewrite_cookies:
description: Whether the html rewrite cookie will be set
type: bool
implicitredirect:
description: Whether to use implicit redirection
type: bool
lbmethod:
description: The method of loadbalancer to use
type: string
locations:
description: The reference names of reverse_proxy/locations associated with the object
type: list
port:
description: The port of the frontend connection
type: int
preservehost:
description: Preserve host header
type: bool
profile:
description: The associated reverse_proxy/profile
type: string
status:
description: Whether the frontend object is active or not
type: bool
type:
description: The connection type
type: string
xheaders:
description: The xheaders state
type: bool
"""
from ansible.module_utils.utm_utils import UTM, UTMModule
from ansible.module_utils._text import to_native
def main():
endpoint = "reverse_proxy/frontend"
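    # Attributes compared against an existing UTM object to decide whether an update is needed.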
key_to_check_for_changes = ["add_content_type_header", "address", "allowed_networks", "certificate",
"comment", "disable_compression", "domain", "exceptions", "htmlrewrite",
"htmlrewrite_cookies", "implicitredirect", "lbmethod", "locations",
"port", "preservehost", "profile", "status", "type", "xheaders"]
module = UTMModule(
argument_spec=dict(
name=dict(type='str', required=True),
add_content_type_header=dict(type='bool', required=False, default=False),
address=dict(type='str', required=False, default="REF_DefaultInternalAddress"),
allowed_networks=dict(type='list', elements='str', required=False, default=["REF_NetworkAny"]),
certificate=dict(type='str', required=False, default=""),
comment=dict(type='str', required=False, default=""),
disable_compression=dict(type='bool', required=False, default=False),
domain=dict(type='list', elements='str', required=False),
exceptions=dict(type='list', elements='str', required=False, default=[]),
htmlrewrite=dict(type='bool', required=False, default=False),
htmlrewrite_cookies=dict(type='bool', required=False, default=False),
implicitredirect=dict(type='bool', required=False, default=False),
lbmethod=dict(type='str', required=False, default="bybusyness",
choices=['bybusyness', 'bytraffic', 'byrequests', '']),
locations=dict(type='list', elements='str', required=False, default=[]),
port=dict(type='int', required=False, default=80),
preservehost=dict(type='bool', required=False, default=False),
profile=dict(type='str', required=False, default=""),
status=dict(type='bool', required=False, default=True),
type=dict(type='str', required=False, default="http", choices=['http', 'https']),
xheaders=dict(type='bool', required=False, default=False),
)
)
try:
UTM(module, endpoint, key_to_check_for_changes).execute()
except Exception as e:
module.fail_json(msg=to_native(e))
if __name__ == '__main__':
main()
| gpl-3.0 | 7,734,201,160,200,705,000 | 31.813433 | 107 | 0.596884 | false |
bintlabs/pygal-nostatics | pygal/css/style_css.py | 1 | 2851 | # -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2014 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
contents = """
{{ id }}{
background-color: {{ style.background }};
}
{{ id }}path,
{{ id }}line,
{{ id }}rect,
{{ id }}circle {
-webkit-transition: {{ style.transition }};
-moz-transition: {{ style.transition }};
transition: {{ style.transition }};
}
{{ id }}.graph > .background {
fill: {{ style.background }};
}
{{ id }}.plot > .background {
fill: {{ style.plot_background }};
}
{{ id }}.graph {
fill: {{ style.foreground }};
}
{{ id }}text.no_data {
fill: {{ style.foreground_light }};
}
{{ id }}.title {
fill: {{ style.foreground_light }};
}
{{ id }}.legends .legend text {
fill: {{ style.foreground }};
}
{{ id }}.legends .legend:hover text {
fill: {{ style.foreground_light }};
}
{{ id }}.axis .line {
stroke: {{ style.foreground_light }};
}
{{ id }}.axis .guide.line {
stroke: {{ style.foreground_dark }};
}
{{ id }}.axis .major.line {
stroke: {{ style.foreground }};
}
{{ id }}.axis text.major {
stroke: {{ style.foreground_light }};
fill: {{ style.foreground_light }};
}
{{ id }}.axis.y .guides:hover .guide.line,
{{ id }}.line-graph .axis.x .guides:hover .guide.line,
{{ id }}.stackedline-graph .axis.x .guides:hover .guide.line,
{{ id }}.xy-graph .axis.x .guides:hover .guide.line {
stroke: {{ style.foreground_light }};
}
{{ id }}.axis .guides:hover text {
fill: {{ style.foreground_light }};
}
{{ id }}.reactive {
fill-opacity: {{ style.opacity }};
}
{{ id }}.reactive.active,
{{ id }}.active .reactive {
fill-opacity: {{ style.opacity_hover }};
}
{{ id }}.series text {
fill: {{ style.foreground_light }};
}
{{ id }}.tooltip rect {
fill: {{ style.plot_background }};
stroke: {{ style.foreground_light }};
}
{{ id }}.tooltip text {
fill: {{ style.foreground_light }};
}
{{ id }}.map-element {
fill: {{ style.foreground }};
stroke: {{ style.foreground_dark }} !important;
opacity: .9;
stroke-width: 3;
-webkit-transition: 250ms;
-moz-transition: 250ms;
-o-transition: 250ms;
transition: 250ms;
}
{{ id }}.map-element:hover {
opacity: 1;
stroke-width: 10;
}
{{ colors }}
"""
| lgpl-3.0 | -4,838,728,551,540,473,000 | 21.093023 | 79 | 0.631579 | false |