repo_name (string, len 5-100) | path (string, len 4-299) | copies (string, 990 classes) | size (string, len 4-7) | content (string, len 666-1.03M) | license (string, 15 classes) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
paulballesty/zxcvbn | data-scripts/build_keyboard_adjacency_graphs.py | 9 | 4051 | #!/usr/bin/python
import sys
import simplejson
def usage():
return '''
constructs adjacency_graphs.coffee from QWERTY, DVORAK and keypad layouts
usage:
%s adjacency_graphs.coffee
''' % sys.argv[0]
qwerty = r'''
`~ 1! 2@ 3# 4$ 5% 6^ 7& 8* 9( 0) -_ =+
qQ wW eE rR tT yY uU iI oO pP [{ ]} \|
aA sS dD fF gG hH jJ kK lL ;: '"
zZ xX cC vV bB nN mM ,< .> /?
'''
dvorak = r'''
`~ 1! 2@ 3# 4$ 5% 6^ 7& 8* 9( 0) [{ ]}
'" ,< .> pP yY fF gG cC rR lL /? =+ \|
aA oO eE uU iI dD hH tT nN sS -_
;: qQ jJ kK xX bB mM wW vV zZ
'''
keypad = r'''
/ * -
7 8 9 +
4 5 6
1 2 3
0 .
'''
mac_keypad = r'''
= / *
7 8 9 -
4 5 6 +
1 2 3
0 .
'''
def get_slanted_adjacent_coords(x, y):
'''
returns the six adjacent coordinates on a standard keyboard, where each row is slanted to the
right from the last. adjacencies are clockwise, starting with key to the left, then two keys
above, then right key, then two keys below. (that is, only near-diagonal keys are adjacent,
so g's coordinate is adjacent to those of t,y,b,v, but not those of r,u,n,c.)
'''
return [(x-1, y), (x, y-1), (x+1, y-1), (x+1, y), (x, y+1), (x-1, y+1)]
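# A quick worked example (illustrative only, not used by the build): with
# x=2, y=2 the six clockwise neighbours come back as
#   get_slanted_adjacent_coords(2, 2) -> [(1, 2), (2, 1), (3, 1), (3, 2), (2, 3), (1, 3)]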
def get_aligned_adjacent_coords(x, y):
'''
returns the eight clockwise adjacent coordinates on a keypad, where each row is vertically aligned.
'''
return [(x-1, y), (x-1, y-1), (x, y-1), (x+1, y-1), (x+1, y), (x+1, y+1), (x, y+1), (x-1, y+1)]
def build_graph(layout_str, slanted):
'''
builds an adjacency graph as a dictionary: {character: [adjacent_characters]}.
adjacent characters occur in a clockwise order.
for example:
* on qwerty layout, 'g' maps to ['fF', 'tT', 'yY', 'hH', 'bB', 'vV']
* on mac keypad layout, '7' maps to [None, None, None, '=', '8', '5', '4', None]
'''
position_table = {} # maps from tuple (x,y) -> characters at that position.
tokens = layout_str.split()
token_size = len(tokens[0])
x_unit = token_size + 1 # x position unit len is token len plus 1 for the following whitespace.
adjacency_func = get_slanted_adjacent_coords if slanted else get_aligned_adjacent_coords
assert all(len(token) == token_size for token in tokens), 'token len mismatch:\n ' + layout_str
for y, line in enumerate(layout_str.split('\n')):
# the way I illustrated keys above, each qwerty row is indented one space in from the last
slant = y - 1 if slanted else 0
for token in line.split():
x, remainder = divmod(line.index(token) - slant, x_unit)
assert remainder == 0, 'unexpected x offset for %s in:\n%s' % (token, layout_str)
position_table[(x,y)] = token
adjacency_graph = {}
for (x,y), chars in position_table.iteritems():
for char in chars:
adjacency_graph[char] = []
for coord in adjacency_func(x, y):
# position in the list indicates direction
# (for qwerty, 0 is left, 1 is top, 2 is top right, ...)
# for edge chars like 1 or m, insert None as a placeholder when needed
# so that each character in the graph has a same-length adjacency list.
adjacency_graph[char].append(position_table.get(coord, None))
return adjacency_graph
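# Hedged usage sketch (relies only on the layout strings defined above; the
# 'g' result restates the docstring example):
#   graph = build_graph(qwerty, True)
#   graph['g'] -> ['fF', 'tT', 'yY', 'hH', 'bB', 'vV']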
if __name__ == '__main__':
if len(sys.argv) != 2:
print usage()
sys.exit(0)
with open(sys.argv[1], 'w') as f:
f.write('# generated by scripts/build_keyboard_adjacency_graphs.py\n')
f.write('adjacency_graphs = \n ')
lines = []
for graph_name, args in [('qwerty', (qwerty, True)),
('dvorak', (dvorak, True)),
('keypad', (keypad, False)),
('mac_keypad', (mac_keypad, False))]:
graph = build_graph(*args)
lines.append('%s: %s' % (graph_name, simplejson.dumps(graph, sort_keys=True)))
f.write('\n '.join(lines))
f.write('\n\n')
f.write('module.exports = adjacency_graphs\n')
sys.exit(0)
| mit | 8,791,864,322,519,584,000 | 35.827273 | 99 | 0.567267 | false |
rghe/ansible | contrib/inventory/nagios_ndo.py | 42 | 3808 | #!/usr/bin/env python
# (c) 2014, Jonathan Lestrelin <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Nagios NDO external inventory script.
========================================
Returns hosts and hostgroups from Nagios NDO.
Configuration is read from `nagios_ndo.ini`.
"""
import os
import argparse
import sys
try:
import configparser
except ImportError:
import ConfigParser
configparser = ConfigParser
import json
try:
from sqlalchemy import text
from sqlalchemy.engine import create_engine
except ImportError:
sys.exit("Error: SQLAlchemy is needed. Try something like: pip install sqlalchemy")
class NagiosNDOInventory(object):
def read_settings(self):
config = configparser.SafeConfigParser()
config.read(os.path.dirname(os.path.realpath(__file__)) + '/nagios_ndo.ini')
if config.has_option('ndo', 'database_uri'):
self.ndo_database_uri = config.get('ndo', 'database_uri')
def read_cli(self):
parser = argparse.ArgumentParser()
parser.add_argument('--host', nargs=1)
parser.add_argument('--list', action='store_true')
self.options = parser.parse_args()
def get_hosts(self):
engine = create_engine(self.ndo_database_uri)
connection = engine.connect()
select_hosts = text("SELECT display_name \
FROM nagios_hosts")
select_hostgroups = text("SELECT alias \
FROM nagios_hostgroups")
select_hostgroup_hosts = text("SELECT h.display_name \
FROM nagios_hostgroup_members hgm, nagios_hosts h, nagios_hostgroups hg \
WHERE hgm.hostgroup_id = hg.hostgroup_id \
AND hgm.host_object_id = h.host_object_id \
AND hg.alias =:hostgroup_alias")
hosts = connection.execute(select_hosts)
self.result['all']['hosts'] = [host['display_name'] for host in hosts]
for hostgroup in connection.execute(select_hostgroups):
hostgroup_alias = hostgroup['alias']
self.result[hostgroup_alias] = {}
hosts = connection.execute(select_hostgroup_hosts, hostgroup_alias=hostgroup_alias)
self.result[hostgroup_alias]['hosts'] = [host['display_name'] for host in hosts]
def __init__(self):
self.defaultgroup = 'group_all'
self.ndo_database_uri = None
self.options = None
self.read_settings()
self.read_cli()
self.result = {}
self.result['all'] = {}
self.result['all']['hosts'] = []
self.result['_meta'] = {}
self.result['_meta']['hostvars'] = {}
if self.ndo_database_uri:
self.get_hosts()
if self.options.host:
print(json.dumps({}))
elif self.options.list:
print(json.dumps(self.result))
else:
sys.exit("usage: --list or --host HOSTNAME")
else:
sys.exit("Error: Database configuration is missing. See nagios_ndo.ini.")
NagiosNDOInventory()
| gpl-3.0 | -210,779,300,152,225,630 | 33.93578 | 112 | 0.612658 | false |
balachandrana/pythonista_stash_utilities | catbinascii.py | 1 | 2770 | """ catbinascii - dump a binary file as hex-encoded ASCII lines, or rebuild the binary from them.
Examples (filenames are illustrative):
catbinascii data.bin > data.txt
catbinascii -u data.txt data.bin
"""
"""
todo:
__doc__
test examples
"""
import argparse
import fileinput
import os
import re
import sys
import binascii
def main(args):
ap = argparse.ArgumentParser(description=__doc__)
ap.add_argument('files', nargs='*', help='files to be processed')
ap.add_argument('-u', '--catunbinascii', action='store_true',
help='convert binascii to binary file')
ap.add_argument('-b', '--buffersize', action='store',
help='buffer size')
ns = ap.parse_args(args)
if not ns.buffersize:
ns.buffersize = 32
else:
ns.buffersize = int(ns.buffersize)
files = None
if not ns.catunbinascii:
try:
files = [f for f in ns.files if not os.path.isdir(f)]
for f in files:
fp = open(f, "rb")
buf = fp.read(ns.buffersize)
while buf:
print binascii.hexlify(buf)
buf = fp.read(ns.buffersize)
fp.close()
except IOError as err:
sys.stderr.write("catbinascii: {}: {!s}".format(
type(err).__name__, err))
else:
try:
if ns.files:
if len(ns.files) == 1:
fps = sys.stdin
if not os.path.isdir(ns.files[0]):
fpd = open(files[1], "wb")
else:
sys.stderr.write("%s destination file is a directory\n"
% ns.files[0])
sys.exit(0)
elif len(ns.files) == 2:
if not os.path.isdir(ns.files[0]):
fps = open(ns.files[0])
else:
sys.stderr.write(
"%s source file is a directory\n" % ns.files[0])
sys.exit(0)
if not os.path.isdir(ns.files[1]):
fpd = open(ns.files[1], "wb")
else:
sys.stderr.write("%s destination file is a directory\n"
% ns.files[1])
sys.exit(0)
else:
sys.stderr.write("too many files specified\n")
sys.exit(0)
line = fps.readline()
while line:
fpd.write(binascii.unhexlify(line.strip()))
line = fps.readline()
fps.close()
fpd.close()
except IOError as err:
sys.stderr.write("catbinascii: {}: {!s}".format(
type(err).__name__, err))
if __name__ == "__main__":
main(sys.argv[1:])
| mit | -7,256,439,255,489,745,000 | 31.209302 | 79 | 0.438628 | false |
devs1991/test_edx_docmode | lms/djangoapps/shoppingcart/utils.py | 103 | 2782 | """
Utility methods for the Shopping Cart app
"""
from django.conf import settings
from microsite_configuration import microsite
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import PDFPageAggregator
from pdfminer.pdfpage import PDFPage
from pdfminer.layout import LAParams, LTTextBox, LTTextLine, LTFigure
def is_shopping_cart_enabled():
"""
Utility method to check the various configuration to verify that
all of the settings have been enabled
"""
enable_paid_course_registration = microsite.get_value(
'ENABLE_PAID_COURSE_REGISTRATION',
settings.FEATURES.get('ENABLE_PAID_COURSE_REGISTRATION')
)
enable_shopping_cart = microsite.get_value(
'ENABLE_SHOPPING_CART',
settings.FEATURES.get('ENABLE_SHOPPING_CART')
)
return enable_paid_course_registration and enable_shopping_cart
def parse_pages(pdf_buffer, password):
"""
With an PDF buffer object, get the pages, parse each one, and return the entire pdf text
"""
# Create a PDF parser object associated with the file object.
parser = PDFParser(pdf_buffer)
# Create a PDF document object that stores the document structure.
# Supply the password for initialization.
document = PDFDocument(parser, password)
resource_manager = PDFResourceManager()
la_params = LAParams()
device = PDFPageAggregator(resource_manager, laparams=la_params)
interpreter = PDFPageInterpreter(resource_manager, device)
text_content = [] # a list of strings, each representing text collected from each page of the doc
for page in PDFPage.create_pages(document):
interpreter.process_page(page)
# receive the LTPage object for this page
layout = device.get_result()
# layout is an LTPage object which may contain
# child objects like LTTextBox, LTFigure, LTImage, etc.
text_content.append(parse_lt_objects(layout._objs)) # pylint: disable=protected-access
return text_content
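# Hedged usage sketch (the filename is hypothetical; PDFs without a password
# take password=''):
#   with open('receipt.pdf', 'rb') as pdf_buffer:
#       pages = parse_pages(pdf_buffer, '')
#   # pages is a list with one extracted-text string per PDF page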
def parse_lt_objects(lt_objects):
"""
Iterate through the list of LT* objects and capture the text data contained in each object
"""
text_content = []
for lt_object in lt_objects:
if isinstance(lt_object, LTTextBox) or isinstance(lt_object, LTTextLine):
# text
text_content.append(lt_object.get_text().encode('utf-8'))
elif isinstance(lt_object, LTFigure):
# LTFigure objects are containers for other LT* objects, so recurse through the children
text_content.append(parse_lt_objects(lt_object._objs)) # pylint: disable=protected-access
return '\n'.join(text_content)
| agpl-3.0 | 4,464,919,487,932,740,000 | 36.594595 | 102 | 0.714234 | false |
thomazs/geraldo | site/newsite/django_1_0/django/contrib/markup/templatetags/markup.py | 38 | 3646 | """
Set of "markup" template filters for Django. These filters transform plain text
markup syntaxes to HTML; currently there is support for:
* Textile, which requires the PyTextile library available at
http://dealmeida.net/projects/textile/
* Markdown, which requires the Python-markdown library from
http://www.freewisdom.org/projects/python-markdown
* ReStructuredText, which requires docutils from http://docutils.sf.net/
In each case, if the required library is not installed, the filter will
silently fail and return the un-marked-up text.
"""
from django import template
from django.conf import settings
from django.utils.encoding import smart_str, force_unicode
from django.utils.safestring import mark_safe
register = template.Library()
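# Typical template usage (the variable name is illustrative):
#   {% load markup %}
#   {{ story.body|textile }}
#   {{ story.body|markdown:"safe" }}
#   {{ story.body|restructuredtext }}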
def textile(value):
try:
import textile
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError, "Error in {% textile %} filter: The Python textile library isn't installed."
return force_unicode(value)
else:
return mark_safe(force_unicode(textile.textile(smart_str(value), encoding='utf-8', output='utf-8')))
textile.is_safe = True
def markdown(value, arg=''):
"""
Runs Markdown over a given value, optionally using various
extensions python-markdown supports.
Syntax::
{{ value|markdown:"extension1_name,extension2_name..." }}
To enable safe mode, which strips raw HTML and only returns HTML
generated by actual Markdown syntax, pass "safe" as the first
extension in the list.
If the version of Markdown in use does not support extensions,
they will be silently ignored.
"""
try:
import markdown
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError, "Error in {% markdown %} filter: The Python markdown library isn't installed."
return force_unicode(value)
else:
# markdown.version was first added in 1.6b. The only version of markdown
# to fully support extensions before 1.6b was the shortlived 1.6a.
if hasattr(markdown, 'version'):
extensions = [e for e in arg.split(",") if e]
if len(extensions) > 0 and extensions[0] == "safe":
extensions = extensions[1:]
safe_mode = True
else:
safe_mode = False
# Unicode support only in markdown v1.7 or above. Version_info
# exist only in markdown v1.6.2rc-2 or above.
if getattr(markdown, "version_info", None) < (1,7):
return mark_safe(force_unicode(markdown.markdown(smart_str(value), extensions, safe_mode=safe_mode)))
else:
return mark_safe(markdown.markdown(force_unicode(value), extensions, safe_mode=safe_mode))
else:
return mark_safe(force_unicode(markdown.markdown(smart_str(value))))
markdown.is_safe = True
def restructuredtext(value):
try:
from docutils.core import publish_parts
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError, "Error in {% restructuredtext %} filter: The Python docutils library isn't installed."
return force_unicode(value)
else:
docutils_settings = getattr(settings, "RESTRUCTUREDTEXT_FILTER_SETTINGS", {})
parts = publish_parts(source=smart_str(value), writer_name="html4css1", settings_overrides=docutils_settings)
return mark_safe(force_unicode(parts["fragment"]))
restructuredtext.is_safe = True
register.filter(textile)
register.filter(markdown)
register.filter(restructuredtext)
| lgpl-3.0 | -7,105,715,288,604,475,000 | 37.787234 | 134 | 0.679649 | false |
geomagpy/MARTAS | libmqtt/lorawanserver.py | 1 | 7323 | from __future__ import print_function
from __future__ import absolute_import
# ###################################################################
# Import packages
# ###################################################################
from magpy.stream import DataStream, KEYLIST, NUMKEYLIST, subtractStreams
import struct
from datetime import datetime
import json
import base64
import binascii
def datetime2array(t):
return [t.year,t.month,t.day,t.hour,t.minute,t.second,t.microsecond]
## LORA-ZAMG - protocol
##
class lorawanserver(object):
"""
application/3/node/0018b2200000034a/rx {"applicationID":"3","applicationName":"Temperature-and-Humidity","deviceName":"TITEC-Multisensor","devEUI":"0018b2200000034a","rxInfo":[{"gatewayID":"00800000a0001285","name":"MTCDT_AEPGW2","rssi":-49,"loRaSNR":7.2,"location":{"latitude":48.248422399999995,"longitude":16.3520512,"altitude":0}}],"txInfo":{"frequency":868500000,"dr":5},"adr":true,"fCnt":457,"fPort":1,"data":"QgASEzQVIg/HVA=="}
content suggestion: appeui, deveui, sensorname, locationname, sensor model -> rest beelike
topic suggestions:
headline/station/sensor
ideally, headline is a unique format identifier
e.g.
loraz/schwarzenbergplatz/adeunis
why: this way stations can be queried fairly systematically
mobile sensors without a fixed location:
loraz/mobile/adeunis
"""
def __init__(self):
"""
"""
print (" -> Initializing loraWAN server routines ...")
#self.payload = payload
#self.topic = topic
self.topicidentifier = {'startswith':'application','endswith':'rx'}
self.datakeytranslator = {'tl':['t1','degC'], 'rf':['var1','per'], 'corr':['var5','none']}
self.identifier = {}
self.headdict = {}
self.headstream = {}
def GetPayload(self, payload, topic):
loradict = json.loads(payload)
# convert loradict to headdict (header) and data_bin
newpayload, sensorid, headline, header = self.loradict2datastruct(loradict)
return newpayload, sensorid, headline, header, self.identifier
def b2v7(self,b1,b2,div):
val = ((b2 << 8) + b1)/ float(div)
return val
def b2v(self,b1,b2,b3,off):
v = (((b1 << 8) + b2) << 8) + b3
val = (v/100000. *6.25) - off
return val
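# Worked example (byte values are illustrative): b2v(0x01, 0x02, 0x03, 55)
# packs the bytes big-endian into 0x010203 = 66051, then scales:
#   66051 / 100000. * 6.25 - 55 -> -50.8718...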
def loradict2datastruct(self, loradict):
datakeytranslator = {'tl':['t1','degC'], 'rf':['var1','per'], 'corr':['var5','none'], 'bat':['var4','per']}
rxdict = loradict.get('rxInfo')[0]
locdict = rxdict.get('location')
header = {}
header['SensorName'] = loradict.get('deviceName','LORA')
header['SensorDescription'] = loradict.get('applicationName','not specified')
header['SensorSerialNum'] = loradict.get('devEUI','')
header['SensorGroup'] = loradict.get('deviceName','LORA')
sensorid = header['SensorName'][:5].replace('-','') + '_' + header['SensorSerialNum'] + '_0001'
header['SensorID'] = sensorid
header['StationID'] = rxdict.get('gatewayID','undefined')
header['StationName'] = rxdict.get('name','undefined')
header['StationLongitude'] = locdict.get('longitude','')
header['StationLatitude'] = locdict.get('latitude','')
if not locdict.get('longitude','') == '':
header['StationLocationReference'] = 'WGS84, EPSG: 4326'
if locdict.get('altitude','') in ['',0,'0']:
alt = ''
else:
alt = locdict.get('altitude')
header['StationElevation'] = alt
if not alt == '':
header['StationElevationRef'] = 'm NN'
datacode = loradict.get('data')
# convert to something like datadict = {"tl":21.75,"rf":36.9}
barray = bytearray(base64.b64decode(datacode))
print ("Device:", loradict.get('deviceName'))
print ("Length Bytearray:", len(barray))
if len(barray) == 10:
temp = self.b2v(barray[3],barray[4],barray[5],55)
rf = self.b2v(barray[7],barray[8],barray[9],25)
datadict = {"tl":temp, "rf":rf}
elif len(barray) == 7:
print ("Found Bytearray 7 with code", datacode)
temp = self.b2v7(barray[1],barray[2],100)
rf = self.b2v7(barray[3],barray[4],100)
bat = self.b2v7(barray[5],barray[6],1)
datadict = {"tl":temp, "rf":rf, "bat":bat}
else:
print ("Found Bytearray of length {} with code", len(barray), datacode)
print ("Payload looks like", loradict)
temp = 999.0
rf = -10.0
datadict = {"tl":temp, "rf":rf}
keylist, elemlist, unitlist, multilist = [],[],[],[]
if not loradict.get('DateTime','') == '':
time = datetime.strptime(loradict.get('DateTime'),"%Y-%m-%dT%H:%M:%S.%fZ")
elif not loradict.get('DatumSec','') == '':
time = datetime.strptime(loradict.get('DatumSec'),"%Y-%m-%dT%H:%M:%S.%fZ")
else:
time = datetime.utcnow()
datalst = datetime2array(time)
packstr = '6hL'
for elem in datadict:
if elem in datakeytranslator:
key = datakeytranslator[elem][0]
unit = datakeytranslator[elem][1]
keylist.append(key)
elemlist.append(elem)
unitlist.append(unit)
multilist.append(1000)
packstr += "l"
datalst.append(int(datadict[elem]*1000))
#print (elem, datadict[elem])
datalst = [str(elem) for elem in datalst]
dataline =','.join(datalst)
#print ("DATA", dataline)
self.identifier[sensorid+':packingcode'] = packstr
self.identifier[sensorid+':keylist'] = keylist
self.identifier[sensorid+':elemlist'] = elemlist
self.identifier[sensorid+':unitlist'] = unitlist
self.identifier[sensorid+':multilist'] = multilist
def identifier2line(dic, sensorid):
p1 = dic.get(sensorid+':packingcode')
p2 = dic.get(sensorid+':keylist')
p3 = dic.get(sensorid+':elemlist')
p4 = dic.get(sensorid+':unitlist')
p5 = dic.get(sensorid+':multilist')
p5 = [str(elem) for elem in p5]
size = struct.calcsize(p1)
line = "# MagPyBin {} [{}] [{}] [{}] [{}] {} {}".format(sensorid,','.join(p2),','.join(p3),','.join(p4),','.join(p5),p1,size)
return line
headline = identifier2line(self.identifier, sensorid)
#self.headstream[sensorid] = create_head_dict(self.headdict[sensorid],sensorid)
#self.headstream[sensorid] = merge_two_dicts(self.headstream[sensorid], header)
#print ("HEAD1", headdict[sensorid])
#print ("HEAD2", headstream[sensorid])
print ("success")
return dataline, sensorid, headline, header
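# Hedged usage sketch (payload/topic follow the MQTT shapes in the class
# docstring above):
#   server = lorawanserver()
#   dataline, sensorid, headline, header, ident = server.GetPayload(payload, topic)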
| gpl-3.0 | 7,231,067,676,429,583,000 | 43.114458 | 438 | 0.543083 | false |
kobotoolbox/kpi | kpi/tests/api/v1/test_api_permissions.py | 1 | 5983 | # coding: utf-8
from django.contrib.auth.models import User, Permission
from django.urls import reverse
from django.utils import timezone
from rest_framework import status
from kpi.constants import ASSET_TYPE_COLLECTION
from kpi.models import Asset, ObjectPermission
from kpi.models.object_permission import get_anonymous_user
# importing the module instead of the class avoids running the tests twice
from kpi.tests.api.v2 import test_api_permissions
from kpi.tests.kpi_test_case import KpiTestCase
class ApiAnonymousPermissionsTestCase(test_api_permissions.ApiAnonymousPermissionsTestCase):
URL_NAMESPACE = None
class ApiPermissionsPublicAssetTestCase(test_api_permissions.ApiPermissionsPublicAssetTestCase):
URL_NAMESPACE = None
class ApiPermissionsTestCase(test_api_permissions.ApiPermissionsTestCase):
URL_NAMESPACE = None
class ApiAssignedPermissionsTestCase(KpiTestCase):
"""
An obnoxiously large amount of code to test that the endpoint for listing
assigned permissions complies with the following rules:
* Superusers see it all (thank goodness for pagination)
* Anonymous users see nothing
* Regular users see everything that concerns them, namely all
their own permissions and all the owners' permissions for all objects
to which they have been assigned any permission
See also `kpi.filters.KpiAssignedObjectPermissionsFilter`
"""
def setUp(self):
super().setUp()
self.anon = get_anonymous_user()
self.super = User.objects.get(username='admin')
self.super_password = 'pass'
self.someuser = User.objects.get(username='someuser')
self.someuser_password = 'someuser'
self.anotheruser = User.objects.get(username='anotheruser')
self.anotheruser_password = 'anotheruser'
def create_object_with_specific_pk(model, pk, **kwargs):
obj = model()
obj.pk = pk
for k, v in kwargs.items():
setattr(obj, k, v)
obj.save()
return obj
self.collection = Asset.objects.create(
asset_type=ASSET_TYPE_COLLECTION, owner=self.someuser
)
self.asset = Asset.objects.create(owner=self.someuser)
def test_anon_cannot_list_permissions(self):
self.asset.assign_perm(self.anon, 'view_asset')
self.assertTrue(self.anon.has_perm('view_asset', self.asset))
url = reverse('objectpermission-list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertListEqual(response.data['results'], [])
self.asset.remove_perm(self.anon, 'view_asset')
self.assertFalse(self.anon.has_perm('view_asset', self.asset))
def test_user_sees_relevant_permissions_on_assigned_objects(self):
# A user with explicitly-assigned permissions should see their
# own permissions and the owner's permissions, but not permissions
# assigned to other users
self.asset.assign_perm(self.anotheruser, 'view_asset')
self.assertTrue(self.anotheruser.has_perm('view_asset', self.asset))
irrelevant_user = User.objects.create(username='mindyourown')
self.asset.assign_perm(irrelevant_user, 'view_asset')
self.client.login(username=self.anotheruser.username,
password=self.anotheruser_password)
url = reverse('objectpermission-list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
returned_uids = [r['uid'] for r in response.data['results']]
all_obj_perms = self.asset.permissions.all()
relevant_obj_perms = all_obj_perms.filter(
user__in=(self.asset.owner, self.anotheruser),
permission__codename__in=self.asset.ASSIGNABLE_PERMISSIONS_BY_TYPE[
self.asset.asset_type
],
)
self.assertListEqual(
sorted(returned_uids),
sorted(relevant_obj_perms.values_list('uid', flat=True)),
)
self.asset.remove_perm(self.anotheruser, 'view_asset')
self.assertFalse(self.anotheruser.has_perm('view_asset', self.asset))
def test_user_cannot_see_permissions_on_unassigned_objects(self):
self.asset.assign_perm(self.anotheruser, 'view_asset')
self.assertTrue(self.anotheruser.has_perm('view_asset', self.asset))
self.client.login(username=self.anotheruser.username,
password=self.anotheruser_password)
url = reverse('objectpermission-list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
returned_uids = [r['uid'] for r in response.data['results']]
other_obj_perms = self.collection.permissions.all()
self.assertFalse(
set(returned_uids).intersection(
other_obj_perms.values_list('uid', flat=True)
)
)
self.asset.remove_perm(self.anotheruser, 'view_asset')
self.assertFalse(self.anotheruser.has_perm('view_asset', self.asset))
def test_superuser_sees_all_permissions(self):
self.asset.assign_perm(self.anotheruser, 'view_asset')
self.assertTrue(self.anotheruser.has_perm('view_asset', self.asset))
self.client.login(username=self.super.username,
password=self.super_password)
url = reverse('objectpermission-list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
returned_uids = [r['uid'] for r in response.data['results']]
self.assertListEqual(
sorted(returned_uids),
sorted(ObjectPermission.objects.values_list('uid', flat=True))
)
self.asset.remove_perm(self.anotheruser, 'view_asset')
self.assertFalse(self.anotheruser.has_perm('view_asset', self.asset))
| agpl-3.0 | 4,349,195,161,084,878,300 | 38.622517 | 96 | 0.669062 | false |
deepmind/dm_control | dm_control/viewer/user_input_test.py | 1 | 6472 | # Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for the user_input module."""
from absl.testing import absltest
from dm_control.viewer import user_input
import mock
class InputMapTests(absltest.TestCase):
def setUp(self):
super().setUp()
self.mouse = mock.MagicMock()
self.keyboard = mock.MagicMock()
self.input_map = user_input.InputMap(self.mouse, self.keyboard)
self.callback = mock.MagicMock()
def test_clearing_bindings(self):
self.input_map._active_exclusive = 1
self.input_map._action_callbacks = {1: 2}
self.input_map._double_click_callbacks = {3: 4}
self.input_map._plane_callback = [5]
self.input_map._z_axis_callback = [6]
self.input_map.clear_bindings()
self.assertEmpty(self.input_map._action_callbacks)
self.assertEmpty(self.input_map._double_click_callbacks)
self.assertEmpty(self.input_map._plane_callback)
self.assertEmpty(self.input_map._z_axis_callback)
self.assertEqual(
user_input._NO_EXCLUSIVE_KEY, self.input_map._active_exclusive)
def test_binding(self):
self.input_map.bind(self.callback, user_input.KEY_UP)
expected_dict = {
(user_input.KEY_UP, user_input.MOD_NONE): (False, self.callback)}
self.assertDictEqual(expected_dict, self.input_map._action_callbacks)
def test_binding_exclusive(self):
self.input_map.bind(self.callback, user_input.Exclusive(user_input.KEY_UP))
expected_dict = {
(user_input.KEY_UP, user_input.MOD_NONE): (True, self.callback)}
self.assertDictEqual(expected_dict, self.input_map._action_callbacks)
def test_binding_and_invoking_ranges_of_actions(self):
self.input_map.bind(self.callback, user_input.Range(
[user_input.KEY_UP, (user_input.KEY_UP, user_input.MOD_ALT)]))
self.input_map._handle_key(
user_input.KEY_UP, user_input.PRESS, user_input.MOD_NONE)
self.callback.assert_called_once_with(0)
self.callback.reset_mock()
self.input_map._handle_key(
user_input.KEY_UP, user_input.PRESS, user_input.MOD_ALT)
self.callback.assert_called_once_with(1)
def test_binding_planar_action(self):
self.input_map.bind_plane(self.callback)
self.assertLen(self.input_map._plane_callback, 1)
self.assertEqual(self.callback, self.input_map._plane_callback[0])
def test_binding_z_axis_action(self):
self.input_map.bind_z_axis(self.callback)
self.assertLen(self.input_map._z_axis_callback, 1)
self.assertEqual(self.callback, self.input_map._z_axis_callback[0])
def test_invoking_regular_action_in_response_to_click(self):
self.input_map._action_callbacks = {(1, 2): (False, self.callback)}
self.input_map._handle_key(1, user_input.PRESS, 2)
self.callback.assert_called_once()
self.callback.reset_mock()
self.input_map._handle_key(1, user_input.RELEASE, 2)
self.assertEqual(0, self.callback.call_count)
def test_invoking_exclusive_action_in_response_to_click(self):
self.input_map._action_callbacks = {(1, 2): (True, self.callback)}
self.input_map._handle_key(1, user_input.PRESS, 2)
self.callback.assert_called_once_with(True)
self.callback.reset_mock()
self.input_map._handle_key(1, user_input.RELEASE, 2)
self.callback.assert_called_once_with(False)
def test_exclusive_action_blocks_other_actions_until_its_finished(self):
self.input_map._action_callbacks = {
(1, 2): (True, self.callback), (3, 4): (False, self.callback)}
self.input_map._handle_key(1, user_input.PRESS, 2)
self.callback.assert_called_once_with(True)
self.callback.reset_mock()
# Attempting to start other actions (PRESS) or end them (RELEASE)
# amounts to nothing.
self.input_map._handle_key(3, user_input.PRESS, 4)
self.assertEqual(0, self.callback.call_count)
self.input_map._handle_key(3, user_input.RELEASE, 4)
self.assertEqual(0, self.callback.call_count)
# Even attempting to start the same action for the 2nd time fails.
self.input_map._handle_key(1, user_input.PRESS, 2)
self.assertEqual(0, self.callback.call_count)
# Only finishing the action frees up the resources.
self.input_map._handle_key(1, user_input.RELEASE, 2)
self.callback.assert_called_once_with(False)
self.callback.reset_mock()
# Now we can start a new action.
self.input_map._handle_key(3, user_input.PRESS, 4)
self.callback.assert_called_once()
def test_modifiers_required_only_for_exclusive_action_start(self):
activation_modifiers = 2
no_modifiers = 0
self.input_map._action_callbacks = {
(1, activation_modifiers): (True, self.callback)}
self.input_map._handle_key(1, user_input.PRESS, activation_modifiers)
self.callback.assert_called_once_with(True)
self.callback.reset_mock()
self.input_map._handle_key(1, user_input.RELEASE, no_modifiers)
self.callback.assert_called_once_with(False)
def test_invoking_regular_action_in_response_to_double_click(self):
self.input_map._double_click_callbacks = {(1, 2): self.callback}
self.input_map._handle_double_click(1, 2)
self.callback.assert_called_once()
def test_exclusive_actions_dont_respond_to_double_clicks(self):
self.input_map._action_callbacks = {(1, 2): (True, self.callback)}
self.input_map._handle_double_click(1, 2)
self.assertEqual(0, self.callback.call_count)
def test_mouse_move(self):
position = [1, 2]
translation = [3, 4]
self.input_map._plane_callback = [self.callback]
self.input_map._handle_mouse_move(position, translation)
self.callback.assert_called_once_with(position, translation)
def test_mouse_scroll(self):
value = 5
self.input_map._z_axis_callback = [self.callback]
self.input_map._handle_mouse_scroll(value)
self.callback.assert_called_once_with(value)
if __name__ == '__main__':
absltest.main()
| apache-2.0 | 5,779,521,694,902,967,000 | 36.627907 | 79 | 0.696075 | false |
Nepherhotep/django | tests/postgres_tests/test_ranges.py | 98 | 24582 | import datetime
import json
import unittest
from django import forms
from django.core import exceptions, serializers
from django.db import connection
from django.db.models import F
from django.test import TestCase, override_settings
from django.utils import timezone
from . import PostgreSQLTestCase
from .models import RangeLookupsModel, RangesModel
try:
from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange
from django.contrib.postgres import fields as pg_fields, forms as pg_forms
from django.contrib.postgres.validators import (
RangeMaxValueValidator, RangeMinValueValidator,
)
except ImportError:
pass
def skipUnlessPG92(test):
try:
PG_VERSION = connection.pg_version
except AttributeError:
PG_VERSION = 0
if PG_VERSION < 90200:
return unittest.skip('PostgreSQL >= 9.2 required')(test)
return test
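# Used below as a class decorator, e.g. @skipUnlessPG92 on TestSaveLoad, so an
# entire TestCase is skipped when the backend is not PostgreSQL >= 9.2.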
@skipUnlessPG92
class TestSaveLoad(TestCase):
def test_all_fields(self):
now = timezone.now()
instance = RangesModel(
ints=NumericRange(0, 10),
bigints=NumericRange(10, 20),
floats=NumericRange(20, 30),
timestamps=DateTimeTZRange(now - datetime.timedelta(hours=1), now),
dates=DateRange(now.date() - datetime.timedelta(days=1), now.date()),
)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(instance.ints, loaded.ints)
self.assertEqual(instance.bigints, loaded.bigints)
self.assertEqual(instance.floats, loaded.floats)
self.assertEqual(instance.timestamps, loaded.timestamps)
self.assertEqual(instance.dates, loaded.dates)
def test_range_object(self):
r = NumericRange(0, 10)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_tuple(self):
instance = RangesModel(ints=(0, 10))
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(NumericRange(0, 10), loaded.ints)
def test_range_object_boundaries(self):
r = NumericRange(0, 10, '[]')
instance = RangesModel(floats=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.floats)
self.assertTrue(10 in loaded.floats)
def test_unbounded(self):
r = NumericRange(None, None, '()')
instance = RangesModel(floats=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.floats)
def test_empty(self):
r = NumericRange(empty=True)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_null(self):
instance = RangesModel(ints=None)
instance.save()
loaded = RangesModel.objects.get()
self.assertIsNone(loaded.ints)
@skipUnlessPG92
class TestQuerying(TestCase):
@classmethod
def setUpTestData(cls):
cls.objs = [
RangesModel.objects.create(ints=NumericRange(0, 10)),
RangesModel.objects.create(ints=NumericRange(5, 15)),
RangesModel.objects.create(ints=NumericRange(None, 0)),
RangesModel.objects.create(ints=NumericRange(empty=True)),
RangesModel.objects.create(ints=None),
]
def test_exact(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__exact=NumericRange(0, 10)),
[self.objs[0]],
)
def test_isnull(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isnull=True),
[self.objs[4]],
)
def test_isempty(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isempty=True),
[self.objs[3]],
)
def test_contains(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=8),
[self.objs[0], self.objs[1]],
)
def test_contains_range(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=NumericRange(3, 8)),
[self.objs[0]],
)
def test_contained_by(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contained_by=NumericRange(0, 20)),
[self.objs[0], self.objs[1], self.objs[3]],
)
def test_overlap(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__overlap=NumericRange(3, 8)),
[self.objs[0], self.objs[1]],
)
def test_fully_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_lt=NumericRange(5, 10)),
[self.objs[2]],
)
def test_fully_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_gt=NumericRange(5, 10)),
[],
)
def test_not_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_lt=NumericRange(5, 10)),
[self.objs[1]],
)
def test_not_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_gt=NumericRange(5, 10)),
[self.objs[0], self.objs[2]],
)
def test_adjacent_to(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__adjacent_to=NumericRange(0, 5)),
[self.objs[1], self.objs[2]],
)
def test_startswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith=0),
[self.objs[0]],
)
def test_endswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__endswith=0),
[self.objs[2]],
)
def test_startswith_chaining(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith__gte=0),
[self.objs[0], self.objs[1]],
)
@skipUnlessPG92
class TestQueryingWithRanges(TestCase):
def test_date_range(self):
objs = [
RangeLookupsModel.objects.create(date='2015-01-01'),
RangeLookupsModel.objects.create(date='2015-05-05'),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(date__contained_by=DateRange('2015-01-01', '2015-05-04')),
[objs[0]],
)
def test_date_range_datetime_field(self):
objs = [
RangeLookupsModel.objects.create(timestamp='2015-01-01'),
RangeLookupsModel.objects.create(timestamp='2015-05-05'),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(timestamp__date__contained_by=DateRange('2015-01-01', '2015-05-04')),
[objs[0]],
)
def test_datetime_range(self):
objs = [
RangeLookupsModel.objects.create(timestamp='2015-01-01T09:00:00'),
RangeLookupsModel.objects.create(timestamp='2015-05-05T17:00:00'),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
timestamp__contained_by=DateTimeTZRange('2015-01-01T09:00', '2015-05-04T23:55')
),
[objs[0]],
)
def test_integer_range(self):
objs = [
RangeLookupsModel.objects.create(integer=5),
RangeLookupsModel.objects.create(integer=99),
RangeLookupsModel.objects.create(integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(integer__contained_by=NumericRange(1, 98)),
[objs[0]]
)
def test_biginteger_range(self):
objs = [
RangeLookupsModel.objects.create(big_integer=5),
RangeLookupsModel.objects.create(big_integer=99),
RangeLookupsModel.objects.create(big_integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(big_integer__contained_by=NumericRange(1, 98)),
[objs[0]]
)
def test_float_range(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=NumericRange(1, 98)),
[objs[0]]
)
def test_f_ranges(self):
parent = RangesModel.objects.create(floats=NumericRange(0, 10))
objs = [
RangeLookupsModel.objects.create(float=5, parent=parent),
RangeLookupsModel.objects.create(float=99, parent=parent),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=F('parent__floats')),
[objs[0]]
)
def test_exclude(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.exclude(float__contained_by=NumericRange(0, 100)),
[objs[2]]
)
@skipUnlessPG92
class TestSerialization(TestCase):
test_data = (
'[{"fields": {"ints": "{\\"upper\\": \\"10\\", \\"lower\\": \\"0\\", '
'\\"bounds\\": \\"[)\\"}", "floats": "{\\"empty\\": true}", '
'"bigints": null, "timestamps": "{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
'\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"[)\\"}", '
'"dates": "{\\"upper\\": \\"2014-02-02\\", \\"lower\\": \\"2014-01-01\\", \\"bounds\\": \\"[)\\"}" }, '
'"model": "postgres_tests.rangesmodel", "pk": null}]'
)
lower_date = datetime.date(2014, 1, 1)
upper_date = datetime.date(2014, 2, 2)
lower_dt = datetime.datetime(2014, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
upper_dt = datetime.datetime(2014, 2, 2, 12, 12, 12, tzinfo=timezone.utc)
def test_dumping(self):
instance = RangesModel(ints=NumericRange(0, 10), floats=NumericRange(empty=True),
timestamps=DateTimeTZRange(self.lower_dt, self.upper_dt),
dates=DateRange(self.lower_date, self.upper_date))
data = serializers.serialize('json', [instance])
dumped = json.loads(data)
for field in ('ints', 'dates', 'timestamps'):
dumped[0]['fields'][field] = json.loads(dumped[0]['fields'][field])
check = json.loads(self.test_data)
for field in ('ints', 'dates', 'timestamps'):
check[0]['fields'][field] = json.loads(check[0]['fields'][field])
self.assertEqual(dumped, check)
def test_loading(self):
instance = list(serializers.deserialize('json', self.test_data))[0].object
self.assertEqual(instance.ints, NumericRange(0, 10))
self.assertEqual(instance.floats, NumericRange(empty=True))
self.assertEqual(instance.bigints, None)
class TestValidators(PostgreSQLTestCase):
def test_max(self):
validator = RangeMaxValueValidator(5)
validator(NumericRange(0, 5))
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], 'Ensure that this range is completely less than or equal to 5.')
self.assertEqual(cm.exception.code, 'max_value')
def test_min(self):
validator = RangeMinValueValidator(5)
validator(NumericRange(10, 15))
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], 'Ensure that this range is completely greater than or equal to 5.')
self.assertEqual(cm.exception.code, 'min_value')
class TestFormField(PostgreSQLTestCase):
def test_valid_integer(self):
field = pg_forms.IntegerRangeField()
value = field.clean(['1', '2'])
self.assertEqual(value, NumericRange(1, 2))
def test_valid_floats(self):
field = pg_forms.FloatRangeField()
value = field.clean(['1.12345', '2.001'])
self.assertEqual(value, NumericRange(1.12345, 2.001))
def test_valid_timestamps(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(['01/01/2014 00:00:00', '02/02/2014 12:12:12'])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(value, DateTimeTZRange(lower, upper))
def test_valid_dates(self):
field = pg_forms.DateRangeField()
value = field.clean(['01/01/2014', '02/02/2014'])
lower = datetime.date(2014, 1, 1)
upper = datetime.date(2014, 2, 2)
self.assertEqual(value, DateRange(lower, upper))
def test_using_split_datetime_widget(self):
class SplitDateTimeRangeField(pg_forms.DateTimeRangeField):
base_field = forms.SplitDateTimeField
class SplitForm(forms.Form):
field = SplitDateTimeRangeField()
form = SplitForm()
self.assertHTMLEqual(str(form), '''
<tr>
<th>
<label for="id_field_0">Field:</label>
</th>
<td>
<input id="id_field_0_0" name="field_0_0" type="text" />
<input id="id_field_0_1" name="field_0_1" type="text" />
<input id="id_field_1_0" name="field_1_0" type="text" />
<input id="id_field_1_1" name="field_1_1" type="text" />
</td>
</tr>
''')
form = SplitForm({
'field_0_0': '01/01/2014',
'field_0_1': '00:00:00',
'field_1_0': '02/02/2014',
'field_1_1': '12:12:12',
})
self.assertTrue(form.is_valid())
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(form.cleaned_data['field'], DateTimeTZRange(lower, upper))
def test_none(self):
field = pg_forms.IntegerRangeField(required=False)
value = field.clean(['', ''])
self.assertEqual(value, None)
def test_rendering(self):
class RangeForm(forms.Form):
ints = pg_forms.IntegerRangeField()
self.assertHTMLEqual(str(RangeForm()), '''
<tr>
<th><label for="id_ints_0">Ints:</label></th>
<td>
<input id="id_ints_0" name="ints_0" type="number" />
<input id="id_ints_1" name="ints_1" type="number" />
</td>
</tr>
''')
def test_integer_lower_bound_higher(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['10', '2'])
self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.')
self.assertEqual(cm.exception.code, 'bound_ordering')
def test_integer_open(self):
field = pg_forms.IntegerRangeField()
value = field.clean(['', '0'])
self.assertEqual(value, NumericRange(None, 0))
def test_integer_incorrect_data_type(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('1')
self.assertEqual(cm.exception.messages[0], 'Enter two whole numbers.')
self.assertEqual(cm.exception.code, 'invalid')
def test_integer_invalid_lower(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['a', '2'])
self.assertEqual(cm.exception.messages[0], 'Enter a whole number.')
def test_integer_invalid_upper(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['1', 'b'])
self.assertEqual(cm.exception.messages[0], 'Enter a whole number.')
def test_integer_required(self):
field = pg_forms.IntegerRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['', ''])
self.assertEqual(cm.exception.messages[0], 'This field is required.')
value = field.clean([1, ''])
self.assertEqual(value, NumericRange(1, None))
def test_float_lower_bound_higher(self):
field = pg_forms.FloatRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['1.8', '1.6'])
self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.')
self.assertEqual(cm.exception.code, 'bound_ordering')
def test_float_open(self):
field = pg_forms.FloatRangeField()
value = field.clean(['', '3.1415926'])
self.assertEqual(value, NumericRange(None, 3.1415926))
def test_float_incorrect_data_type(self):
field = pg_forms.FloatRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('1.6')
self.assertEqual(cm.exception.messages[0], 'Enter two numbers.')
self.assertEqual(cm.exception.code, 'invalid')
def test_float_invalid_lower(self):
field = pg_forms.FloatRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['a', '3.1415926'])
self.assertEqual(cm.exception.messages[0], 'Enter a number.')
def test_float_invalid_upper(self):
field = pg_forms.FloatRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['1.61803399', 'b'])
self.assertEqual(cm.exception.messages[0], 'Enter a number.')
def test_float_required(self):
field = pg_forms.FloatRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['', ''])
self.assertEqual(cm.exception.messages[0], 'This field is required.')
value = field.clean(['1.61803399', ''])
self.assertEqual(value, NumericRange(1.61803399, None))
def test_date_lower_bound_higher(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['2013-04-09', '1976-04-16'])
self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.')
self.assertEqual(cm.exception.code, 'bound_ordering')
def test_date_open(self):
field = pg_forms.DateRangeField()
value = field.clean(['', '2013-04-09'])
self.assertEqual(value, DateRange(None, datetime.date(2013, 4, 9)))
def test_date_incorrect_data_type(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('1')
self.assertEqual(cm.exception.messages[0], 'Enter two valid dates.')
self.assertEqual(cm.exception.code, 'invalid')
def test_date_invalid_lower(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['a', '2013-04-09'])
self.assertEqual(cm.exception.messages[0], 'Enter a valid date.')
def test_date_invalid_upper(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['2013-04-09', 'b'])
self.assertEqual(cm.exception.messages[0], 'Enter a valid date.')
def test_date_required(self):
field = pg_forms.DateRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['', ''])
self.assertEqual(cm.exception.messages[0], 'This field is required.')
value = field.clean(['1976-04-16', ''])
self.assertEqual(value, DateRange(datetime.date(1976, 4, 16), None))
def test_datetime_lower_bound_higher(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['2006-10-25 14:59', '2006-10-25 14:58'])
self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.')
self.assertEqual(cm.exception.code, 'bound_ordering')
def test_datetime_open(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(['', '2013-04-09 11:45'])
self.assertEqual(value, DateTimeTZRange(None, datetime.datetime(2013, 4, 9, 11, 45)))
def test_datetime_incorrect_data_type(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('2013-04-09 11:45')
self.assertEqual(cm.exception.messages[0], 'Enter two valid date/times.')
self.assertEqual(cm.exception.code, 'invalid')
def test_datetime_invalid_lower(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['45', '2013-04-09 11:45'])
self.assertEqual(cm.exception.messages[0], 'Enter a valid date/time.')
def test_datetime_invalid_upper(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['2013-04-09 11:45', 'sweet pickles'])
self.assertEqual(cm.exception.messages[0], 'Enter a valid date/time.')
def test_datetime_required(self):
field = pg_forms.DateTimeRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['', ''])
self.assertEqual(cm.exception.messages[0], 'This field is required.')
value = field.clean(['2013-04-09 11:45', ''])
self.assertEqual(value, DateTimeTZRange(datetime.datetime(2013, 4, 9, 11, 45), None))
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Johannesburg')
def test_datetime_prepare_value(self):
field = pg_forms.DateTimeRangeField()
value = field.prepare_value(
DateTimeTZRange(datetime.datetime(2015, 5, 22, 16, 6, 33, tzinfo=timezone.utc), None)
)
self.assertEqual(value, [datetime.datetime(2015, 5, 22, 18, 6, 33), None])
def test_model_field_formfield_integer(self):
model_field = pg_fields.IntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
def test_model_field_formfield_biginteger(self):
model_field = pg_fields.BigIntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
def test_model_field_formfield_float(self):
model_field = pg_fields.FloatRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.FloatRangeField)
def test_model_field_formfield_date(self):
model_field = pg_fields.DateRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateRangeField)
def test_model_field_formfield_datetime(self):
model_field = pg_fields.DateTimeRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateTimeRangeField)
class TestWidget(PostgreSQLTestCase):
def test_range_widget(self):
f = pg_forms.ranges.DateTimeRangeField()
self.assertHTMLEqual(
f.widget.render('datetimerange', ''),
'<input type="text" name="datetimerange_0" /><input type="text" name="datetimerange_1" />'
)
self.assertHTMLEqual(
f.widget.render('datetimerange', None),
'<input type="text" name="datetimerange_0" /><input type="text" name="datetimerange_1" />'
)
dt_range = DateTimeTZRange(
datetime.datetime(2006, 1, 10, 7, 30),
datetime.datetime(2006, 2, 12, 9, 50)
)
self.assertHTMLEqual(
f.widget.render('datetimerange', dt_range),
'<input type="text" name="datetimerange_0" value="2006-01-10 07:30:00" />'
'<input type="text" name="datetimerange_1" value="2006-02-12 09:50:00" />'
)
| bsd-3-clause | -4,758,179,205,725,029,000 | 37.89557 | 118 | 0.616142 | false |
Delgan/w2ui | server/python/bottle/bottle.py | 28 | 143552 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Bottle is a fast and simple micro-framework for small web applications. It
offers request dispatching (Routes) with url parameter support, templates,
a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
template engines - all in a single file and with no dependencies other than the
Python Standard Library.
Homepage and documentation: http://bottlepy.org/
Copyright (c) 2014, Marcel Hellkamp.
License: MIT (see LICENSE for details)
"""
from __future__ import with_statement
__author__ = 'Marcel Hellkamp'
__version__ = '0.13-dev'
__license__ = 'MIT'
# The gevent and eventlet server adapters need to patch some modules before
# they are imported. This is why we parse the commandline parameters here but
# handle them later
if __name__ == '__main__':
from optparse import OptionParser
_cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app")
_opt = _cmd_parser.add_option
_opt("--version", action="store_true", help="show version number.")
_opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.")
_opt("-s", "--server", default='wsgiref', help="use SERVER as backend.")
_opt("-p", "--plugin", action="append", help="install additional plugin/s.")
_opt("--debug", action="store_true", help="start server in debug mode.")
_opt("--reload", action="store_true", help="auto-reload on file changes.")
_cmd_options, _cmd_args = _cmd_parser.parse_args()
if _cmd_options.server:
if _cmd_options.server.startswith('gevent'):
import gevent.monkey; gevent.monkey.patch_all()
elif _cmd_options.server.startswith('eventlet'):
import eventlet; eventlet.monkey_patch()
import base64, cgi, email.utils, functools, hmac, imp, itertools, mimetypes,\
os, re, subprocess, sys, tempfile, threading, time, warnings
from datetime import date as datedate, datetime, timedelta
from tempfile import TemporaryFile
from traceback import format_exc, print_exc
from inspect import getargspec
from unicodedata import normalize
try: from simplejson import dumps as json_dumps, loads as json_lds
except ImportError: # pragma: no cover
try: from json import dumps as json_dumps, loads as json_lds
except ImportError:
try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds
except ImportError:
def json_dumps(data):
raise ImportError("JSON support requires Python 2.6 or simplejson.")
json_lds = json_dumps
# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities.
# It ain't pretty but it works... Sorry for the mess.
py = sys.version_info
py3k = py >= (3, 0, 0)
py25 = py < (2, 6, 0)
py31 = (3, 1, 0) <= py < (3, 2, 0)
# Workaround for the missing "as" keyword in py3k.
def _e(): return sys.exc_info()[1]
# Workaround for the "print is a keyword/function" Python 2/3 dilemma
# and a fallback for mod_wsgi (restricts stdout/err attribute access)
try:
_stdout, _stderr = sys.stdout.write, sys.stderr.write
except IOError:
_stdout = lambda x: sys.stdout.write(x)
_stderr = lambda x: sys.stderr.write(x)
# Lots of stdlib and builtin differences.
if py3k:
import http.client as httplib
import _thread as thread
from urllib.parse import urljoin, SplitResult as UrlSplitResult
from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
urlunquote = functools.partial(urlunquote, encoding='latin1')
from http.cookies import SimpleCookie
from collections import MutableMapping as DictMixin
import pickle
from io import BytesIO
from configparser import ConfigParser
basestring = str
unicode = str
json_loads = lambda s: json_lds(touni(s))
callable = lambda x: hasattr(x, '__call__')
imap = map
def _raise(*a): raise a[0](a[1]).with_traceback(a[2])
else: # 2.x
import httplib
import thread
from urlparse import urljoin, SplitResult as UrlSplitResult
from urllib import urlencode, quote as urlquote, unquote as urlunquote
from Cookie import SimpleCookie
from itertools import imap
import cPickle as pickle
from StringIO import StringIO as BytesIO
from ConfigParser import SafeConfigParser as ConfigParser
if py25:
msg = "Python 2.5 support may be dropped in future versions of Bottle."
warnings.warn(msg, DeprecationWarning)
from UserDict import DictMixin
def next(it): return it.next()
bytes = str
else: # 2.6, 2.7
from collections import MutableMapping as DictMixin
unicode = unicode
json_loads = json_lds
eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))
# Some helpers for string/byte handling
def tob(s, enc='utf8'):
return s.encode(enc) if isinstance(s, unicode) else bytes(s)
def touni(s, enc='utf8', err='strict'):
if isinstance(s, bytes):
return s.decode(enc, err)
else:
return unicode(s or ("" if s is None else s))
tonat = touni if py3k else tob
# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense).
# 3.1 needs a workaround.
if py31:
from io import TextIOWrapper
class NCTextIOWrapper(TextIOWrapper):
def close(self): pass # Keep wrapped buffer open.
# A bug in functools causes it to break if the wrapper is an instance method
def update_wrapper(wrapper, wrapped, *a, **ka):
try:
functools.update_wrapper(wrapper, wrapped, *a, **ka)
except AttributeError:
pass
# These helpers are used at module level and need to be defined first.
# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
def depr(message, strict=False):
warnings.warn(message, DeprecationWarning, stacklevel=3)
def makelist(data): # This is just too handy
if isinstance(data, (tuple, list, set, dict)):
return list(data)
elif data:
return [data]
else:
return []
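# Illustrative sketch of makelist semantics (hypothetical values):
#
#   makelist('GET')            # -> ['GET']
#   makelist(['GET', 'POST'])  # -> ['GET', 'POST']
#   makelist(None)             # -> []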
class DictProperty(object):
""" Property that maps to a key in a local dict-like attribute. """
def __init__(self, attr, key=None, read_only=False):
self.attr, self.key, self.read_only = attr, key, read_only
def __call__(self, func):
functools.update_wrapper(self, func, updated=[])
self.getter, self.key = func, self.key or func.__name__
return self
def __get__(self, obj, cls):
if obj is None: return self
key, storage = self.key, getattr(obj, self.attr)
if key not in storage: storage[key] = self.getter(obj)
return storage[key]
def __set__(self, obj, value):
if self.read_only: raise AttributeError("Read-Only property.")
getattr(obj, self.attr)[self.key] = value
def __delete__(self, obj):
if self.read_only: raise AttributeError("Read-Only property.")
del getattr(obj, self.attr)[self.key]
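# Illustrative sketch (hypothetical class): DictProperty computes a value on
# first access and caches it in a dict-valued attribute of the instance.
#
#   class Ctx(object):
#       def __init__(self):
#           self.environ = {}
#       @DictProperty('environ', 'ctx.now', read_only=True)
#       def now(self):
#           return time.time()
#
#   # The first read of Ctx().now calls now() and stores the result in
#   # environ['ctx.now']; later reads return the cached value.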
class cached_property(object):
""" A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property. """
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None: return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
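# Illustrative sketch (hypothetical class): the property body runs once per
# instance; deleting the attribute re-enables the property.
#
#   class Report(object):
#       @cached_property
#       def stats(self):
#           return compute_stats()   # compute_stats is hypothetical
#
#   r = Report()
#   r.stats      # computed once, then stored in r.__dict__
#   del r.stats  # forget the cached value; next access recomputes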
class lazy_attribute(object):
""" A property that caches itself to the class object. """
def __init__(self, func):
functools.update_wrapper(self, func, updated=[])
self.getter = func
def __get__(self, obj, cls):
value = self.getter(cls)
setattr(cls, self.__name__, value)
return value
###############################################################################
# Exceptions and Events ########################################################
###############################################################################
class BottleException(Exception):
""" A base class for exceptions used by bottle. """
pass
###############################################################################
# Routing ######################################################################
###############################################################################
class RouteError(BottleException):
""" This is a base class for all routing related exceptions """
class RouteReset(BottleException):
""" If raised by a plugin or request handler, the route is reset and all
plugins are re-applied. """
class RouterUnknownModeError(RouteError): pass
class RouteSyntaxError(RouteError):
""" The route parser found something not supported by this router. """
class RouteBuildError(RouteError):
""" The route could not be built. """
def _re_flatten(p):
""" Turn all capturing groups in a regular expression pattern into
non-capturing groups. """
if '(' not in p:
return p
return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))',
lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p)
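# Illustrative sketch of _re_flatten (hypothetical patterns):
#
#   _re_flatten(r'/(?P<name>[^/]+)')  # -> '/(?:[^/]+)'
#   _re_flatten(r'/(abc)')            # -> '/(?:abc)'
#   _re_flatten(r'/\(literal\)')      # escaped parens stay untouched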
class Router(object):
""" A Router is an ordered collection of route->target pairs. It is used to
efficiently match WSGI requests against a number of routes and return
the first target that satisfies the request. The target may be anything,
usually a string, ID or callable object. A route consists of a path-rule
        and an HTTP method.
The path-rule is either a static path (e.g. `/contact`) or a dynamic
path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax
and details on the matching order are described in docs:`routing`.
"""
default_pattern = '[^/]+'
default_filter = 're'
#: The current CPython regexp implementation does not allow more
#: than 99 matching groups per regular expression.
_MAX_GROUPS_PER_PATTERN = 99
def __init__(self, strict=False):
self.rules = [] # All rules in order
self._groups = {} # index of regexes to find them in dyna_routes
self.builder = {} # Data structure for the url builder
self.static = {} # Search structure for static routes
self.dyna_routes = {}
self.dyna_regexes = {} # Search structure for dynamic routes
#: If true, static routes are no longer checked first.
self.strict_order = strict
self.filters = {
're': lambda conf:
(_re_flatten(conf or self.default_pattern), None, None),
'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))),
'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))),
'path': lambda conf: (r'.+?', None, None)}
def add_filter(self, name, func):
""" Add a filter. The provided function is called with the configuration
            string as its parameter and must return a (regexp, to_python, to_url) tuple.
The first element is a string, the last two are callables or None. """
self.filters[name] = func
rule_syntax = re.compile('(\\\\*)'
'(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'
'|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'
'(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))')
def _itertokens(self, rule):
offset, prefix = 0, ''
for match in self.rule_syntax.finditer(rule):
prefix += rule[offset:match.start()]
g = match.groups()
if len(g[0])%2: # Escaped wildcard
prefix += match.group(0)[len(g[0]):]
offset = match.end()
continue
if prefix:
yield prefix, None, None
name, filtr, conf = g[4:7] if g[2] is None else g[1:4]
yield name, filtr or 'default', conf or None
offset, prefix = match.end(), ''
if offset <= len(rule) or prefix:
yield prefix+rule[offset:], None, None
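    # Illustrative sketch of _itertokens output (hypothetical rule); static
    # prefixes and the trailing remainder carry None for filter and config:
    #
    #   list(router._itertokens('/wiki/<page:re:[a-z]+>'))
    #   # -> [('/wiki/', None, None), ('page', 're', '[a-z]+'), ('', None, None)]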
def add(self, rule, method, target, name=None):
""" Add a new rule or replace the target for an existing rule. """
anons = 0 # Number of anonymous wildcards found
keys = [] # Names of keys
pattern = '' # Regular expression pattern with named groups
filters = [] # Lists of wildcard input filters
builder = [] # Data structure for the URL builder
is_static = True
for key, mode, conf in self._itertokens(rule):
if mode:
is_static = False
if mode == 'default': mode = self.default_filter
mask, in_filter, out_filter = self.filters[mode](conf)
if not key:
pattern += '(?:%s)' % mask
key = 'anon%d' % anons
anons += 1
else:
pattern += '(?P<%s>%s)' % (key, mask)
keys.append(key)
if in_filter: filters.append((key, in_filter))
builder.append((key, out_filter or str))
elif key:
pattern += re.escape(key)
builder.append((None, key))
self.builder[rule] = builder
if name: self.builder[name] = builder
if is_static and not self.strict_order:
self.static.setdefault(method, {})
self.static[method][self.build(rule)] = (target, None)
return
try:
re_pattern = re.compile('^(%s)$' % pattern)
re_match = re_pattern.match
except re.error:
raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e()))
if filters:
def getargs(path):
url_args = re_match(path).groupdict()
for name, wildcard_filter in filters:
try:
url_args[name] = wildcard_filter(url_args[name])
except ValueError:
raise HTTPError(400, 'Path has wrong format.')
return url_args
elif re_pattern.groupindex:
def getargs(path):
return re_match(path).groupdict()
else:
getargs = None
flatpat = _re_flatten(pattern)
whole_rule = (rule, flatpat, target, getargs)
if (flatpat, method) in self._groups:
if DEBUG:
msg = 'Route <%s %s> overwrites a previously defined route'
warnings.warn(msg % (method, rule), RuntimeWarning)
self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule
else:
self.dyna_routes.setdefault(method, []).append(whole_rule)
self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1
self._compile(method)
def _compile(self, method):
all_rules = self.dyna_routes[method]
comborules = self.dyna_regexes[method] = []
maxgroups = self._MAX_GROUPS_PER_PATTERN
for x in range(0, len(all_rules), maxgroups):
some = all_rules[x:x+maxgroups]
combined = (flatpat for (_, flatpat, _, _) in some)
combined = '|'.join('(^%s$)' % flatpat for flatpat in combined)
combined = re.compile(combined).match
rules = [(target, getargs) for (_, _, target, getargs) in some]
comborules.append((combined, rules))
def build(self, _name, *anons, **query):
""" Build an URL by filling the wildcards in a rule. """
builder = self.builder.get(_name)
if not builder: raise RouteBuildError("No route with that name.", _name)
try:
for i, value in enumerate(anons): query['anon%d'%i] = value
url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder])
return url if not query else url+'?'+urlencode(query)
except KeyError:
raise RouteBuildError('Missing URL argument: %r' % _e().args[0])
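    # Illustrative sketch (hypothetical router): surplus keyword arguments are
    # appended as a query string.
    #
    #   router.add('/wiki/<page>', 'GET', target='show', name='wiki')
    #   router.build('wiki', page='Home')         # -> '/wiki/Home'
    #   router.build('wiki', page='Home', rev=2)  # -> '/wiki/Home?rev=2'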
def match(self, environ):
""" Return a (target, url_args) tuple or raise HTTPError(400/404/405). """
verb = environ['REQUEST_METHOD'].upper()
path = environ['PATH_INFO'] or '/'
if verb == 'HEAD':
methods = ['PROXY', verb, 'GET', 'ANY']
else:
methods = ['PROXY', verb, 'ANY']
for method in methods:
if method in self.static and path in self.static[method]:
target, getargs = self.static[method][path]
return target, getargs(path) if getargs else {}
elif method in self.dyna_regexes:
for combined, rules in self.dyna_regexes[method]:
match = combined(path)
if match:
target, getargs = rules[match.lastindex - 1]
return target, getargs(path) if getargs else {}
# No matching route found. Collect alternative methods for 405 response
allowed = set([])
nocheck = set(methods)
for method in set(self.static) - nocheck:
if path in self.static[method]:
allowed.add(verb)
for method in set(self.dyna_regexes) - allowed - nocheck:
for combined, rules in self.dyna_regexes[method]:
match = combined(path)
if match:
allowed.add(method)
if allowed:
allow_header = ",".join(sorted(allowed))
raise HTTPError(405, "Method not allowed.", Allow=allow_header)
# No matching route and no alternative method found. We give up
raise HTTPError(404, "Not found: " + repr(path))
class Route(object):
""" This class wraps a route callback along with route specific metadata and
configuration and applies Plugins on demand. It is also responsible for
        turning a URL path rule into a regular expression usable by the Router.
"""
def __init__(self, app, rule, method, callback, name=None,
plugins=None, skiplist=None, **config):
#: The application this route is installed to.
self.app = app
#: The path-rule string (e.g. ``/wiki/<page>``).
self.rule = rule
#: The HTTP method as a string (e.g. ``GET``).
self.method = method
#: The original callback with no plugins applied. Useful for introspection.
self.callback = callback
#: The name of the route (if specified) or ``None``.
self.name = name or None
#: A list of route-specific plugins (see :meth:`Bottle.route`).
self.plugins = plugins or []
#: A list of plugins to not apply to this route (see :meth:`Bottle.route`).
self.skiplist = skiplist or []
#: Additional keyword arguments passed to the :meth:`Bottle.route`
#: decorator are stored in this dictionary. Used for route-specific
#: plugin configuration and meta-data.
self.config = ConfigDict().load_dict(config)
@cached_property
def call(self):
""" The route callback with all plugins applied. This property is
created on demand and then cached to speed up subsequent requests."""
return self._make_callback()
def reset(self):
""" Forget any cached values. The next time :attr:`call` is accessed,
all plugins are re-applied. """
self.__dict__.pop('call', None)
def prepare(self):
""" Do all on-demand work immediately (useful for debugging)."""
self.call
def all_plugins(self):
""" Yield all Plugins affecting this route. """
unique = set()
for p in reversed(self.app.plugins + self.plugins):
if True in self.skiplist: break
name = getattr(p, 'name', False)
if name and (name in self.skiplist or name in unique): continue
if p in self.skiplist or type(p) in self.skiplist: continue
if name: unique.add(name)
yield p
def _make_callback(self):
callback = self.callback
for plugin in self.all_plugins():
try:
if hasattr(plugin, 'apply'):
callback = plugin.apply(callback, self)
else:
callback = plugin(callback)
except RouteReset: # Try again with changed configuration.
return self._make_callback()
        if callback is not self.callback:
update_wrapper(callback, self.callback)
return callback
def get_undecorated_callback(self):
""" Return the callback. If the callback is a decorated function, try to
recover the original function. """
func = self.callback
func = getattr(func, '__func__' if py3k else 'im_func', func)
closure_attr = '__closure__' if py3k else 'func_closure'
while hasattr(func, closure_attr) and getattr(func, closure_attr):
func = getattr(func, closure_attr)[0].cell_contents
return func
def get_callback_args(self):
""" Return a list of argument names the callback (most likely) accepts
as keyword arguments. If the callback is a decorated function, try
to recover the original function before inspection. """
return getargspec(self.get_undecorated_callback())[0]
def get_config(self, key, default=None):
""" Lookup a config field and return its value, first checking the
route.config, then route.app.config."""
        for conf in (self.config, self.app.config):
if key in conf: return conf[key]
return default
def __repr__(self):
cb = self.get_undecorated_callback()
return '<%s %r %r>' % (self.method, self.rule, cb)
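# Illustrative sketch (hypothetical app and callback): routes are usually
# created via Bottle.route(), but can be built and registered by hand.
#
#   route = Route(app, '/hello/<name>', 'GET', hello)
#   app.add_route(route)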
###############################################################################
# Application Object ###########################################################
###############################################################################
class Bottle(object):
""" Each Bottle object represents a single, distinct web application and
consists of routes, callbacks, plugins, resources and configuration.
Instances are callable WSGI applications.
:param catchall: If true (default), handle all exceptions. Turn off to
let debugging middleware handle exceptions.
"""
def __init__(self, catchall=True, autojson=True):
#: A :class:`ConfigDict` for app specific configuration.
self.config = ConfigDict()
self.config._on_change = functools.partial(self.trigger_hook, 'config')
self.config.meta_set('autojson', 'validate', bool)
self.config.meta_set('catchall', 'validate', bool)
self.config['catchall'] = catchall
self.config['autojson'] = autojson
#: A :class:`ResourceManager` for application files
self.resources = ResourceManager()
self.routes = [] # List of installed :class:`Route` instances.
self.router = Router() # Maps requests to :class:`Route` instances.
self.error_handler = {}
# Core plugins
self.plugins = [] # List of installed plugins.
if self.config['autojson']:
self.install(JSONPlugin())
self.install(TemplatePlugin())
#: If true, most exceptions are caught and returned as :exc:`HTTPError`
catchall = DictProperty('config', 'catchall')
__hook_names = 'before_request', 'after_request', 'app_reset', 'config'
__hook_reversed = 'after_request'
@cached_property
def _hooks(self):
return dict((name, []) for name in self.__hook_names)
def add_hook(self, name, func):
""" Attach a callback to a hook. Three hooks are currently implemented:
before_request
Executed once before each request. The request context is
available, but no routing has happened yet.
after_request
Executed once after each request regardless of its outcome.
app_reset
Called whenever :meth:`Bottle.reset` is called.
"""
if name in self.__hook_reversed:
self._hooks[name].insert(0, func)
else:
self._hooks[name].append(func)
def remove_hook(self, name, func):
""" Remove a callback from a hook. """
if name in self._hooks and func in self._hooks[name]:
self._hooks[name].remove(func)
return True
def trigger_hook(self, __name, *args, **kwargs):
""" Trigger a hook and return a list of results. """
return [hook(*args, **kwargs) for hook in self._hooks[__name][:]]
def hook(self, name):
""" Return a decorator that attaches a callback to a hook. See
:meth:`add_hook` for details."""
def decorator(func):
self.add_hook(name, func)
return func
return decorator
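    # Illustrative sketch (hypothetical hook): run a callback before every
    # request.
    #
    #   @app.hook('before_request')
    #   def log_request():
    #       print(request.method, request.path)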
def mount(self, prefix, app, **options):
""" Mount an application (:class:`Bottle` or plain WSGI) to a specific
URL prefix. Example::
root_app.mount('/admin/', admin_app)
:param prefix: path prefix or `mount-point`. If it ends in a slash,
that slash is mandatory.
:param app: an instance of :class:`Bottle` or a WSGI application.
All other parameters are passed to the underlying :meth:`route` call.
"""
segments = [p for p in prefix.split('/') if p]
if not segments: raise ValueError('Empty path prefix.')
path_depth = len(segments)
def mountpoint_wrapper():
try:
request.path_shift(path_depth)
rs = HTTPResponse([])
def start_response(status, headerlist, exc_info=None):
if exc_info:
_raise(*exc_info)
rs.status = status
for name, value in headerlist: rs.add_header(name, value)
return rs.body.append
body = app(request.environ, start_response)
if body and rs.body: body = itertools.chain(rs.body, body)
rs.body = body or rs.body
return rs
finally:
request.path_shift(-path_depth)
options.setdefault('skip', True)
options.setdefault('method', 'PROXY')
options.setdefault('mountpoint', {'prefix': prefix, 'target': app})
options['callback'] = mountpoint_wrapper
self.route('/%s/<:re:.*>' % '/'.join(segments), **options)
if not prefix.endswith('/'):
self.route('/' + '/'.join(segments), **options)
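    # Illustrative sketch (hypothetical apps): after this mount, a request for
    # /api/v1/users is handled by api_app's /users route.
    #
    #   root_app.mount('/api/v1/', api_app)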
def merge(self, routes):
""" Merge the routes of another :class:`Bottle` application or a list of
:class:`Route` objects into this application. The routes keep their
'owner', meaning that the :data:`Route.app` attribute is not
changed. """
if isinstance(routes, Bottle):
routes = routes.routes
for route in routes:
self.add_route(route)
def install(self, plugin):
""" Add a plugin to the list of plugins and prepare it for being
applied to all routes of this application. A plugin may be a simple
decorator or an object that implements the :class:`Plugin` API.
"""
if hasattr(plugin, 'setup'): plugin.setup(self)
if not callable(plugin) and not hasattr(plugin, 'apply'):
raise TypeError("Plugins must be callable or implement .apply()")
self.plugins.append(plugin)
self.reset()
return plugin
def uninstall(self, plugin):
""" Uninstall plugins. Pass an instance to remove a specific plugin, a type
object to remove all plugins that match that type, a string to remove
all plugins with a matching ``name`` attribute or ``True`` to remove all
plugins. Return the list of removed plugins. """
removed, remove = [], plugin
for i, plugin in list(enumerate(self.plugins))[::-1]:
if remove is True or remove is plugin or remove is type(plugin) \
or getattr(plugin, 'name', True) == remove:
removed.append(plugin)
del self.plugins[i]
if hasattr(plugin, 'close'): plugin.close()
if removed: self.reset()
return removed
def reset(self, route=None):
""" Reset all routes (force plugins to be re-applied) and clear all
caches. If an ID or route object is given, only that specific route
is affected. """
if route is None: routes = self.routes
elif isinstance(route, Route): routes = [route]
else: routes = [self.routes[route]]
for route in routes: route.reset()
if DEBUG:
for route in routes: route.prepare()
self.trigger_hook('app_reset')
def close(self):
""" Close the application and all installed plugins. """
for plugin in self.plugins:
if hasattr(plugin, 'close'): plugin.close()
def run(self, **kwargs):
""" Calls :func:`run` with the same parameters. """
run(self, **kwargs)
def match(self, environ):
""" Search for a matching route and return a (:class:`Route` , urlargs)
tuple. The second value is a dictionary with parameters extracted
from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
return self.router.match(environ)
def get_url(self, routename, **kargs):
""" Return a string that matches a named route """
scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
location = self.router.build(routename, **kargs).lstrip('/')
return urljoin(urljoin('/', scriptname), location)
def add_route(self, route):
""" Add a route object, but do not change the :data:`Route.app`
attribute."""
self.routes.append(route)
self.router.add(route.rule, route.method, route, name=route.name)
if DEBUG: route.prepare()
def route(self, path=None, method='GET', callback=None, name=None,
apply=None, skip=None, **config):
""" A decorator to bind a function to a request URL. Example::
@app.route('/hello/<name>')
def hello(name):
return 'Hello %s' % name
            The ``<name>`` part is a wildcard. See :class:`Router` for syntax
details.
:param path: Request path or a list of paths to listen to. If no
path is specified, it is automatically generated from the
signature of the function.
:param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
methods to listen to. (default: `GET`)
:param callback: An optional shortcut to avoid the decorator
syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
:param name: The name for this route. (default: None)
:param apply: A decorator or plugin or a list of plugins. These are
applied to the route callback in addition to installed plugins.
:param skip: A list of plugins, plugin classes or names. Matching
plugins are not installed to this route. ``True`` skips all.
Any additional keyword arguments are stored as route-specific
configuration and passed to plugins (see :meth:`Plugin.apply`).
"""
if callable(path): path, callback = None, path
plugins = makelist(apply)
skiplist = makelist(skip)
def decorator(callback):
if isinstance(callback, basestring): callback = load(callback)
for rule in makelist(path) or yieldroutes(callback):
for verb in makelist(method):
verb = verb.upper()
route = Route(self, rule, verb, callback, name=name,
plugins=plugins, skiplist=skiplist, **config)
self.add_route(route)
return callback
return decorator(callback) if callback else decorator
def get(self, path=None, method='GET', **options):
""" Equals :meth:`route`. """
return self.route(path, method, **options)
def post(self, path=None, method='POST', **options):
""" Equals :meth:`route` with a ``POST`` method parameter. """
return self.route(path, method, **options)
def put(self, path=None, method='PUT', **options):
""" Equals :meth:`route` with a ``PUT`` method parameter. """
return self.route(path, method, **options)
def delete(self, path=None, method='DELETE', **options):
""" Equals :meth:`route` with a ``DELETE`` method parameter. """
return self.route(path, method, **options)
def patch(self, path=None, method='PATCH', **options):
""" Equals :meth:`route` with a ``PATCH`` method parameter. """
return self.route(path, method, **options)
def error(self, code=500):
""" Decorator: Register an output handler for a HTTP error code"""
def wrapper(handler):
self.error_handler[int(code)] = handler
return handler
return wrapper
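    # Illustrative sketch (hypothetical handler): the handler receives the
    # HTTPError instance.
    #
    #   @app.error(404)
    #   def not_found(err):
    #       return 'Nothing here: %s' % err.status_line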
def default_error_handler(self, res):
return tob(template(ERROR_PAGE_TEMPLATE, e=res))
def _handle(self, environ):
path = environ['bottle.raw_path'] = environ['PATH_INFO']
if py3k:
try:
environ['PATH_INFO'] = path.encode('latin1').decode('utf8')
except UnicodeError:
return HTTPError(400, 'Invalid path string. Expected UTF-8')
try:
environ['bottle.app'] = self
request.bind(environ)
response.bind()
try:
self.trigger_hook('before_request')
route, args = self.router.match(environ)
environ['route.handle'] = route
environ['bottle.route'] = route
environ['route.url_args'] = args
return route.call(**args)
finally:
self.trigger_hook('after_request')
except HTTPResponse:
return _e()
except RouteReset:
route.reset()
return self._handle(environ)
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception:
if not self.catchall: raise
stacktrace = format_exc()
environ['wsgi.errors'].write(stacktrace)
return HTTPError(500, "Internal Server Error", _e(), stacktrace)
def _cast(self, out, peek=None):
""" Try to convert the parameter into something WSGI compatible and set
correct HTTP headers when possible.
Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
iterable of strings and iterable of unicodes
"""
# Empty output is done here
if not out:
if 'Content-Length' not in response:
response['Content-Length'] = 0
return []
# Join lists of byte or unicode strings. Mixed lists are NOT supported
if isinstance(out, (tuple, list))\
and isinstance(out[0], (bytes, unicode)):
out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
# Encode unicode strings
if isinstance(out, unicode):
out = out.encode(response.charset)
# Byte Strings are just returned
if isinstance(out, bytes):
if 'Content-Length' not in response:
response['Content-Length'] = len(out)
return [out]
# HTTPError or HTTPException (recursive, because they may wrap anything)
# TODO: Handle these explicitly in handle() or make them iterable.
if isinstance(out, HTTPError):
out.apply(response)
out = self.error_handler.get(out.status_code, self.default_error_handler)(out)
return self._cast(out)
if isinstance(out, HTTPResponse):
out.apply(response)
return self._cast(out.body)
# File-like objects.
if hasattr(out, 'read'):
if 'wsgi.file_wrapper' in request.environ:
return request.environ['wsgi.file_wrapper'](out)
elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
return WSGIFileWrapper(out)
# Handle Iterables. We peek into them to detect their inner type.
try:
iout = iter(out)
first = next(iout)
while not first:
first = next(iout)
except StopIteration:
return self._cast('')
except HTTPResponse:
first = _e()
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except:
if not self.catchall: raise
first = HTTPError(500, 'Unhandled exception', _e(), format_exc())
# These are the inner types allowed in iterator or generator objects.
if isinstance(first, HTTPResponse):
return self._cast(first)
elif isinstance(first, bytes):
new_iter = itertools.chain([first], iout)
elif isinstance(first, unicode):
encoder = lambda x: x.encode(response.charset)
new_iter = imap(encoder, itertools.chain([first], iout))
else:
msg = 'Unsupported response type: %s' % type(first)
return self._cast(HTTPError(500, msg))
if hasattr(out, 'close'):
new_iter = _closeiter(new_iter, out.close)
return new_iter
def wsgi(self, environ, start_response):
""" The bottle WSGI-interface. """
try:
out = self._cast(self._handle(environ))
# rfc2616 section 4.3
if response._status_code in (100, 101, 204, 304)\
or environ['REQUEST_METHOD'] == 'HEAD':
if hasattr(out, 'close'): out.close()
out = []
start_response(response._status_line, response.headerlist)
return out
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except:
if not self.catchall: raise
err = '<h1>Critical error while processing request: %s</h1>' \
% html_escape(environ.get('PATH_INFO', '/'))
if DEBUG:
err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
'<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
% (html_escape(repr(_e())), html_escape(format_exc()))
environ['wsgi.errors'].write(err)
headers = [('Content-Type', 'text/html; charset=UTF-8')]
start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
return [tob(err)]
def __call__(self, environ, start_response):
""" Each instance of :class:'Bottle' is a WSGI application. """
return self.wsgi(environ, start_response)
def __enter__(self):
""" Use this application as default for all module-level shortcuts. """
default_app.push(self)
return self
def __exit__(self, exc_type, exc_value, traceback):
default_app.pop()
###############################################################################
# HTTP and WSGI Tools ##########################################################
###############################################################################
class BaseRequest(object):
""" A wrapper for WSGI environment dictionaries that adds a lot of
convenient access methods and properties. Most of them are read-only.
Adding new attributes to a request actually adds them to the environ
dictionary (as 'bottle.request.ext.<name>'). This is the recommended
way to store and access request-specific data.
"""
__slots__ = ('environ', )
#: Maximum size of memory buffer for :attr:`body` in bytes.
MEMFILE_MAX = 102400
def __init__(self, environ=None):
""" Wrap a WSGI environ dictionary. """
#: The wrapped WSGI environ dictionary. This is the only real attribute.
#: All other attributes actually are read-only properties.
self.environ = {} if environ is None else environ
self.environ['bottle.request'] = self
@DictProperty('environ', 'bottle.app', read_only=True)
def app(self):
""" Bottle application handling this request. """
raise RuntimeError('This request is not connected to an application.')
@DictProperty('environ', 'bottle.route', read_only=True)
def route(self):
""" The bottle :class:`Route` object that matches this request. """
raise RuntimeError('This request is not connected to a route.')
@DictProperty('environ', 'route.url_args', read_only=True)
def url_args(self):
""" The arguments extracted from the URL. """
raise RuntimeError('This request is not connected to a route.')
@property
def path(self):
""" The value of ``PATH_INFO`` with exactly one prefixed slash (to fix
broken clients and avoid the "empty path" edge case). """
return '/' + self.environ.get('PATH_INFO','').lstrip('/')
@property
def method(self):
""" The ``REQUEST_METHOD`` value as an uppercase string. """
return self.environ.get('REQUEST_METHOD', 'GET').upper()
@DictProperty('environ', 'bottle.request.headers', read_only=True)
def headers(self):
""" A :class:`WSGIHeaderDict` that provides case-insensitive access to
HTTP request headers. """
return WSGIHeaderDict(self.environ)
def get_header(self, name, default=None):
""" Return the value of a request header, or a given default value. """
return self.headers.get(name, default)
@DictProperty('environ', 'bottle.request.cookies', read_only=True)
def cookies(self):
""" Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
decoded. Use :meth:`get_cookie` if you expect signed cookies. """
cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')).values()
return FormsDict((c.key, c.value) for c in cookies)
def get_cookie(self, key, default=None, secret=None):
""" Return the content of a cookie. To read a `Signed Cookie`, the
`secret` must match the one used to create the cookie (see
:meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
cookie or wrong signature), return a default value. """
value = self.cookies.get(key)
if secret and value:
dec = cookie_decode(value, secret) # (key, value) tuple or None
return dec[1] if dec and dec[0] == key else default
return value or default
@DictProperty('environ', 'bottle.request.query', read_only=True)
def query(self):
""" The :attr:`query_string` parsed into a :class:`FormsDict`. These
values are sometimes called "URL arguments" or "GET parameters", but
            they should not be confused with the "URL wildcards" provided by the
:class:`Router`. """
get = self.environ['bottle.get'] = FormsDict()
pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
for key, value in pairs:
get[key] = value
return get
@DictProperty('environ', 'bottle.request.forms', read_only=True)
def forms(self):
""" Form values parsed from an `url-encoded` or `multipart/form-data`
encoded POST or PUT request body. The result is returned as a
:class:`FormsDict`. All keys and values are strings. File uploads
are stored separately in :attr:`files`. """
forms = FormsDict()
for name, item in self.POST.allitems():
if not isinstance(item, FileUpload):
forms[name] = item
return forms
@DictProperty('environ', 'bottle.request.params', read_only=True)
def params(self):
""" A :class:`FormsDict` with the combined values of :attr:`query` and
:attr:`forms`. File uploads are stored in :attr:`files`. """
params = FormsDict()
for key, value in self.query.allitems():
params[key] = value
for key, value in self.forms.allitems():
params[key] = value
return params
@DictProperty('environ', 'bottle.request.files', read_only=True)
def files(self):
""" File uploads parsed from `multipart/form-data` encoded POST or PUT
request body. The values are instances of :class:`FileUpload`.
"""
files = FormsDict()
for name, item in self.POST.allitems():
if isinstance(item, FileUpload):
files[name] = item
return files
@DictProperty('environ', 'bottle.request.json', read_only=True)
def json(self):
""" If the ``Content-Type`` header is ``application/json``, this
property holds the parsed content of the request body. Only requests
smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
exhaustion. """
ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0]
if ctype == 'application/json':
b = self._get_body_string()
if not b:
return None
return json_loads(b)
return None
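    # Illustrative sketch (hypothetical route): request.json is None unless
    # the Content-Type is application/json and the body is non-empty.
    #
    #   @app.post('/items')
    #   def create_item():
    #       data = request.json or {}
    #       return {'created': data.get('name')}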
def _iter_body(self, read, bufsize):
maxread = max(0, self.content_length)
while maxread:
part = read(min(maxread, bufsize))
if not part: break
yield part
maxread -= len(part)
@staticmethod
def _iter_chunked(read, bufsize):
err = HTTPError(400, 'Error while parsing chunked transfer body.')
rn, sem, bs = tob('\r\n'), tob(';'), tob('')
while True:
header = read(1)
while header[-2:] != rn:
c = read(1)
header += c
if not c: raise err
if len(header) > bufsize: raise err
size, _, _ = header.partition(sem)
try:
maxread = int(tonat(size.strip()), 16)
except ValueError:
raise err
if maxread == 0: break
buff = bs
while maxread > 0:
if not buff:
buff = read(min(maxread, bufsize))
part, buff = buff[:maxread], buff[maxread:]
if not part: raise err
yield part
maxread -= len(part)
if read(2) != rn:
raise err
@DictProperty('environ', 'bottle.request.body', read_only=True)
def _body(self):
body_iter = self._iter_chunked if self.chunked else self._iter_body
read_func = self.environ['wsgi.input'].read
body, body_size, is_temp_file = BytesIO(), 0, False
for part in body_iter(read_func, self.MEMFILE_MAX):
body.write(part)
body_size += len(part)
if not is_temp_file and body_size > self.MEMFILE_MAX:
body, tmp = TemporaryFile(mode='w+b'), body
body.write(tmp.getvalue())
del tmp
is_temp_file = True
self.environ['wsgi.input'] = body
body.seek(0)
return body
def _get_body_string(self):
""" read body until content-length or MEMFILE_MAX into a string. Raise
HTTPError(413) on requests that are to large. """
clen = self.content_length
if clen > self.MEMFILE_MAX:
raise HTTPError(413, 'Request too large')
if clen < 0: clen = self.MEMFILE_MAX + 1
data = self.body.read(clen)
if len(data) > self.MEMFILE_MAX: # Fail fast
raise HTTPError(413, 'Request too large')
return data
@property
def body(self):
""" The HTTP request body as a seek-able file-like object. Depending on
:attr:`MEMFILE_MAX`, this is either a temporary file or a
:class:`io.BytesIO` instance. Accessing this property for the first
time reads and replaces the ``wsgi.input`` environ variable.
Subsequent accesses just do a `seek(0)` on the file object. """
self._body.seek(0)
return self._body
@property
def chunked(self):
""" True if Chunked transfer encoding was. """
return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower()
#: An alias for :attr:`query`.
GET = query
@DictProperty('environ', 'bottle.request.post', read_only=True)
def POST(self):
""" The values of :attr:`forms` and :attr:`files` combined into a single
:class:`FormsDict`. Values are either strings (form values) or
instances of :class:`cgi.FieldStorage` (file uploads).
"""
post = FormsDict()
# We default to application/x-www-form-urlencoded for everything that
# is not multipart and take the fast path (also: 3.1 workaround)
if not self.content_type.startswith('multipart/'):
pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1'))
for key, value in pairs:
post[key] = value
return post
safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
if key in self.environ: safe_env[key] = self.environ[key]
args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
if py31:
args['fp'] = NCTextIOWrapper(args['fp'], encoding='utf8',
newline='\n')
elif py3k:
args['encoding'] = 'utf8'
data = cgi.FieldStorage(**args)
self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394#msg207958
data = data.list or []
for item in data:
if item.filename:
post[item.name] = FileUpload(item.file, item.name,
item.filename, item.headers)
else:
post[item.name] = item.value
return post
@property
def url(self):
""" The full request URI including hostname and scheme. If your app
lives behind a reverse proxy or load balancer and you get confusing
results, make sure that the ``X-Forwarded-Host`` header is set
correctly. """
return self.urlparts.geturl()
@DictProperty('environ', 'bottle.request.urlparts', read_only=True)
def urlparts(self):
""" The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
The tuple contains (scheme, host, path, query_string and fragment),
but the fragment is always empty because it is not visible to the
server. """
env = self.environ
http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http')
host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
if not host:
# HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
host = env.get('SERVER_NAME', '127.0.0.1')
port = env.get('SERVER_PORT')
if port and port != ('80' if http == 'http' else '443'):
host += ':' + port
path = urlquote(self.fullpath)
return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
@property
def fullpath(self):
""" Request path including :attr:`script_name` (if present). """
return urljoin(self.script_name, self.path.lstrip('/'))
@property
def query_string(self):
""" The raw :attr:`query` part of the URL (everything in between ``?``
and ``#``) as a string. """
return self.environ.get('QUERY_STRING', '')
@property
def script_name(self):
""" The initial portion of the URL's `path` that was removed by a higher
level (server or routing middleware) before the application was
            called. This script path is returned with leading and trailing
slashes. """
script_name = self.environ.get('SCRIPT_NAME', '').strip('/')
return '/' + script_name + '/' if script_name else '/'
def path_shift(self, shift=1):
""" Shift path segments from :attr:`path` to :attr:`script_name` and
vice versa.
:param shift: The number of path segments to shift. May be negative
to change the shift direction. (default: 1)
"""
script = self.environ.get('SCRIPT_NAME','/')
self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift)
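    # Illustrative sketch of path_shift (hypothetical values):
    #
    #   # before: SCRIPT_NAME == '/',      PATH_INFO == '/admin/users'
    #   request.path_shift(1)
    #   # after:  SCRIPT_NAME == '/admin', PATH_INFO == '/users'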
@property
def content_length(self):
""" The request body length as an integer. The client is responsible to
set this header. Otherwise, the real length of the body is unknown
and -1 is returned. In this case, :attr:`body` will be empty. """
return int(self.environ.get('CONTENT_LENGTH') or -1)
@property
def content_type(self):
""" The Content-Type header as a lowercase-string (default: empty). """
return self.environ.get('CONTENT_TYPE', '').lower()
@property
def is_xhr(self):
""" True if the request was triggered by a XMLHttpRequest. This only
works with JavaScript libraries that support the `X-Requested-With`
header (most of the popular libraries do). """
requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','')
return requested_with.lower() == 'xmlhttprequest'
@property
def is_ajax(self):
""" Alias for :attr:`is_xhr`. "Ajax" is not the right term. """
return self.is_xhr
@property
def auth(self):
""" HTTP authentication data as a (user, password) tuple. This
implementation currently supports basic (not digest) authentication
only. If the authentication happened at a higher level (e.g. in the
front web-server or a middleware), the password field is None, but
the user field is looked up from the ``REMOTE_USER`` environ
variable. On any errors, None is returned. """
basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION',''))
if basic: return basic
ruser = self.environ.get('REMOTE_USER')
if ruser: return (ruser, None)
return None
@property
def remote_route(self):
""" A list of all IPs that were involved in this request, starting with
            the client IP and followed by zero or more proxies. This only works
            if all proxies support the ``X-Forwarded-For`` header. Note
that this information can be forged by malicious clients. """
proxy = self.environ.get('HTTP_X_FORWARDED_FOR')
if proxy: return [ip.strip() for ip in proxy.split(',')]
remote = self.environ.get('REMOTE_ADDR')
return [remote] if remote else []
@property
def remote_addr(self):
""" The client IP as a string. Note that this information can be forged
by malicious clients. """
route = self.remote_route
return route[0] if route else None
def copy(self):
""" Return a new :class:`Request` with a shallow :attr:`environ` copy. """
return Request(self.environ.copy())
def get(self, value, default=None): return self.environ.get(value, default)
def __getitem__(self, key): return self.environ[key]
def __delitem__(self, key): self[key] = ""; del(self.environ[key])
def __iter__(self): return iter(self.environ)
def __len__(self): return len(self.environ)
def keys(self): return self.environ.keys()
def __setitem__(self, key, value):
""" Change an environ value and clear all caches that depend on it. """
if self.environ.get('bottle.request.readonly'):
raise KeyError('The environ dictionary is read-only.')
self.environ[key] = value
todelete = ()
if key == 'wsgi.input':
todelete = ('body', 'forms', 'files', 'params', 'post', 'json')
elif key == 'QUERY_STRING':
todelete = ('query', 'params')
elif key.startswith('HTTP_'):
todelete = ('headers', 'cookies')
for key in todelete:
self.environ.pop('bottle.request.'+key, None)
def __repr__(self):
return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url)
def __getattr__(self, name):
""" Search in self.environ for additional user defined attributes. """
try:
var = self.environ['bottle.request.ext.%s'%name]
return var.__get__(self) if hasattr(var, '__get__') else var
except KeyError:
raise AttributeError('Attribute %r not defined.' % name)
def __setattr__(self, name, value):
if name == 'environ': return object.__setattr__(self, name, value)
self.environ['bottle.request.ext.%s'%name] = value
def _hkey(s):
return s.title().replace('_','-')
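# Illustrative sketch of header-key normalization (hypothetical inputs):
#
#   _hkey('content_type')     # -> 'Content-Type'
#   _hkey('X_FORWARDED_FOR')  # -> 'X-Forwarded-For'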
class HeaderProperty(object):
def __init__(self, name, reader=None, writer=str, default=''):
self.name, self.default = name, default
self.reader, self.writer = reader, writer
self.__doc__ = 'Current value of the %r header.' % name.title()
def __get__(self, obj, _):
if obj is None: return self
value = obj.headers.get(self.name, self.default)
return self.reader(value) if self.reader else value
def __set__(self, obj, value):
obj.headers[self.name] = self.writer(value)
def __delete__(self, obj):
del obj.headers[self.name]
class BaseResponse(object):
""" Storage class for a response body as well as headers and cookies.
This class does support dict-like case-insensitive item-access to
headers, but is NOT a dict. Most notably, iterating over a response
yields parts of the body and not the headers.
:param body: The response body as one of the supported types.
:param status: Either an HTTP status code (e.g. 200) or a status line
including the reason phrase (e.g. '200 OK').
:param headers: A dictionary or a list of name-value pairs.
Additional keyword arguments are added to the list of headers.
Underscores in the header name are replaced with dashes.
"""
default_status = 200
default_content_type = 'text/html; charset=UTF-8'
# Header blacklist for specific response codes
# (rfc2616 section 10.2.3 and 10.3.5)
bad_headers = {
204: set(('Content-Type',)),
304: set(('Allow', 'Content-Encoding', 'Content-Language',
'Content-Length', 'Content-Range', 'Content-Type',
'Content-Md5', 'Last-Modified'))}
def __init__(self, body='', status=None, headers=None, **more_headers):
self._cookies = None
self._headers = {}
self.body = body
self.status = status or self.default_status
if headers:
if isinstance(headers, dict):
headers = headers.items()
for name, value in headers:
self.add_header(name, value)
if more_headers:
for name, value in more_headers.items():
self.add_header(name, value)
def copy(self, cls=None):
""" Returns a copy of self. """
cls = cls or BaseResponse
assert issubclass(cls, BaseResponse)
copy = cls()
copy.status = self.status
copy._headers = dict((k, v[:]) for (k, v) in self._headers.items())
if self._cookies:
copy._cookies = SimpleCookie()
copy._cookies.load(self._cookies.output())
return copy
def __iter__(self):
return iter(self.body)
def close(self):
if hasattr(self.body, 'close'):
self.body.close()
@property
def status_line(self):
""" The HTTP status line as a string (e.g. ``404 Not Found``)."""
return self._status_line
@property
def status_code(self):
""" The HTTP status code as an integer (e.g. 404)."""
return self._status_code
def _set_status(self, status):
if isinstance(status, int):
code, status = status, _HTTP_STATUS_LINES.get(status)
elif ' ' in status:
status = status.strip()
code = int(status.split()[0])
else:
raise ValueError('String status line without a reason phrase.')
if not 100 <= code <= 999: raise ValueError('Status code out of range.')
self._status_code = code
self._status_line = str(status or ('%d Unknown' % code))
def _get_status(self):
return self._status_line
status = property(_get_status, _set_status, None,
''' A writeable property to change the HTTP response status. It accepts
either a numeric code (100-999) or a string with a custom reason
phrase (e.g. "404 Brain not found"). Both :data:`status_line` and
:data:`status_code` are updated accordingly. The return value is
always a status string. ''')
del _get_status, _set_status
@property
def headers(self):
""" An instance of :class:`HeaderDict`, a case-insensitive dict-like
view on the response headers. """
hdict = HeaderDict()
hdict.dict = self._headers
return hdict
def __contains__(self, name): return _hkey(name) in self._headers
def __delitem__(self, name): del self._headers[_hkey(name)]
def __getitem__(self, name): return self._headers[_hkey(name)][-1]
def __setitem__(self, name, value): self._headers[_hkey(name)] = [str(value)]
def get_header(self, name, default=None):
""" Return the value of a previously defined header. If there is no
header with that name, return a default value. """
return self._headers.get(_hkey(name), [default])[-1]
def set_header(self, name, value):
""" Create a new response header, replacing any previously defined
headers with the same name. """
self._headers[_hkey(name)] = [value if isinstance(value, unicode) else str(value)]
def add_header(self, name, value):
""" Add an additional response header, not removing duplicates. """
self._headers.setdefault(_hkey(name), []).append(str(value))
def iter_headers(self):
""" Yield (header, value) tuples, skipping headers that are not
allowed with the current response status code. """
return self.headerlist
@property
def headerlist(self):
""" WSGI conform list of (header, value) tuples. """
out = []
headers = list(self._headers.items())
if 'Content-Type' not in self._headers:
headers.append(('Content-Type', [self.default_content_type]))
if self._status_code in self.bad_headers:
bad_headers = self.bad_headers[self._status_code]
headers = [h for h in headers if h[0] not in bad_headers]
out += [(name, val) for (name, vals) in headers for val in vals]
if self._cookies:
for c in self._cookies.values():
out.append(('Set-Cookie', c.OutputString()))
if py3k:
out = [
(k, v.encode('utf8').decode('latin1')
if isinstance(v, unicode) else v) for (k, v) in out]
return out
content_type = HeaderProperty('Content-Type')
content_length = HeaderProperty('Content-Length', reader=int)
expires = HeaderProperty('Expires',
reader=lambda x: datetime.utcfromtimestamp(parse_date(x)),
writer=lambda x: http_date(x))
@property
def charset(self, default='UTF-8'):
""" Return the charset specified in the content-type header (default: utf8). """
if 'charset=' in self.content_type:
return self.content_type.split('charset=')[-1].split(';')[0].strip()
return default
def set_cookie(self, name, value, secret=None, **options):
""" Create a new cookie or replace an old one. If the `secret` parameter is
set, create a `Signed Cookie` (described below).
:param name: the name of the cookie.
:param value: the value of the cookie.
:param secret: a signature key required for signed cookies.
Additionally, this method accepts all RFC 2109 attributes that are
supported by :class:`cookie.Morsel`, including:
:param max_age: maximum age in seconds. (default: None)
:param expires: a datetime object or UNIX timestamp. (default: None)
:param domain: the domain that is allowed to read the cookie.
(default: current domain)
:param path: limits the cookie to a given path (default: current path)
:param secure: limit the cookie to HTTPS connections (default: off).
            :param httponly: prevents client-side JavaScript from reading this
              cookie (default: off, requires Python 2.6 or newer).
If neither `expires` nor `max_age` is set (default), the cookie will
expire at the end of the browser session (as soon as the browser
window is closed).
Signed cookies may store any pickle-able object and are
cryptographically signed to prevent manipulation. Keep in mind that
cookies are limited to 4kb in most browsers.
Warning: Signed cookies are not encrypted (the client can still see
the content) and not copy-protected (the client can restore an old
            cookie). The main intention is to make pickling and unpickling
            safe, not to store secret information on the client side.
"""
if not self._cookies:
self._cookies = SimpleCookie()
if secret:
value = touni(cookie_encode((name, value), secret))
elif not isinstance(value, basestring):
raise TypeError('Secret key missing for non-string Cookie.')
        if len(value) > 4096: raise ValueError('Cookie value too long.')
self._cookies[name] = value
for key, value in options.items():
if key == 'max_age':
if isinstance(value, timedelta):
value = value.seconds + value.days * 24 * 3600
if key == 'expires':
if isinstance(value, (datedate, datetime)):
value = value.timetuple()
elif isinstance(value, (int, float)):
value = time.gmtime(value)
value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
self._cookies[name][key.replace('_', '-')] = value
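    # Illustrative sketch (hypothetical secret): plain vs. signed cookies.
    #
    #   response.set_cookie('visits', '10', path='/')
    #   response.set_cookie('account', {'id': 42}, secret='s3cret')
    #   # ...and in a later request handler:
    #   request.get_cookie('account', secret='s3cret')  # -> {'id': 42}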
def delete_cookie(self, key, **kwargs):
""" Delete a cookie. Be sure to use the same `domain` and `path`
settings as used to create the cookie. """
kwargs['max_age'] = -1
kwargs['expires'] = 0
self.set_cookie(key, '', **kwargs)
def __repr__(self):
out = ''
for name, value in self.headerlist:
out += '%s: %s\n' % (name.title(), value.strip())
return out
def _local_property():
ls = threading.local()
def fget(_):
try: return ls.var
except AttributeError:
raise RuntimeError("Request context not initialized.")
def fset(_, value): ls.var = value
def fdel(_): del ls.var
return property(fget, fset, fdel, 'Thread-local property')
class LocalRequest(BaseRequest):
""" A thread-local subclass of :class:`BaseRequest` with a different
set of attributes for each thread. There is usually only one global
instance of this class (:data:`request`). If accessed during a
request/response cycle, this instance always refers to the *current*
request (even on a multithreaded server). """
bind = BaseRequest.__init__
environ = _local_property()
class LocalResponse(BaseResponse):
""" A thread-local subclass of :class:`BaseResponse` with a different
set of attributes for each thread. There is usually only one global
instance of this class (:data:`response`). Its attributes are used
to build the HTTP response at the end of the request/response cycle.
"""
bind = BaseResponse.__init__
_status_line = _local_property()
_status_code = _local_property()
_cookies = _local_property()
_headers = _local_property()
body = _local_property()
Request = BaseRequest
Response = BaseResponse
class HTTPResponse(Response, BottleException):
def __init__(self, body='', status=None, headers=None, **more_headers):
super(HTTPResponse, self).__init__(body, status, headers, **more_headers)
def apply(self, other):
other._status_code = self._status_code
other._status_line = self._status_line
other._headers = self._headers
other._cookies = self._cookies
other.body = self.body
class HTTPError(HTTPResponse):
default_status = 500
def __init__(self, status=None, body=None, exception=None, traceback=None,
**options):
self.exception = exception
self.traceback = traceback
super(HTTPError, self).__init__(body, status, **options)
###############################################################################
# Plugins ######################################################################
###############################################################################
class PluginError(BottleException): pass
class JSONPlugin(object):
name = 'json'
api = 2
def __init__(self, json_dumps=json_dumps):
self.json_dumps = json_dumps
def apply(self, callback, _):
dumps = self.json_dumps
if not dumps: return callback
def wrapper(*a, **ka):
try:
rv = callback(*a, **ka)
except HTTPError:
rv = _e()
if isinstance(rv, dict):
                # Attempt to serialize; raises an exception on failure
json_response = dumps(rv)
                # Set content type only if serialization succeeded
response.content_type = 'application/json'
return json_response
elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
rv.body = dumps(rv.body)
rv.content_type = 'application/json'
return rv
return wrapper
class TemplatePlugin(object):
""" This plugin applies the :func:`view` decorator to all routes with a
`template` config parameter. If the parameter is a tuple, the second
element must be a dict with additional options (e.g. `template_engine`)
or default variables for the template. """
name = 'template'
api = 2
def apply(self, callback, route):
conf = route.config.get('template')
if isinstance(conf, (tuple, list)) and len(conf) == 2:
return view(conf[0], **conf[1])(callback)
elif isinstance(conf, str):
return view(conf)(callback)
else:
return callback
#: Not a plugin, but part of the plugin API. TODO: Find a better place.
class _ImportRedirect(object):
def __init__(self, name, impmask):
""" Create a virtual package that redirects imports (see PEP 302). """
self.name = name
self.impmask = impmask
self.module = sys.modules.setdefault(name, imp.new_module(name))
self.module.__dict__.update({'__file__': __file__, '__path__': [],
'__all__': [], '__loader__': self})
sys.meta_path.append(self)
def find_module(self, fullname, path=None):
if '.' not in fullname: return
packname = fullname.rsplit('.', 1)[0]
if packname != self.name: return
return self
def load_module(self, fullname):
if fullname in sys.modules: return sys.modules[fullname]
modname = fullname.rsplit('.', 1)[1]
realname = self.impmask % modname
__import__(realname)
module = sys.modules[fullname] = sys.modules[realname]
setattr(self.module, modname, module)
module.__loader__ = self
return module
###############################################################################
# Common Utilities #############################################################
###############################################################################
class MultiDict(DictMixin):
""" This dict stores multiple values per key, but behaves exactly like a
normal dict in that it returns only the newest value for any given key.
There are special methods available to access the full list of values.
"""
def __init__(self, *a, **k):
self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items())
def __len__(self): return len(self.dict)
def __iter__(self): return iter(self.dict)
def __contains__(self, key): return key in self.dict
def __delitem__(self, key): del self.dict[key]
def __getitem__(self, key): return self.dict[key][-1]
def __setitem__(self, key, value): self.append(key, value)
def keys(self): return self.dict.keys()
if py3k:
def values(self): return (v[-1] for v in self.dict.values())
def items(self): return ((k, v[-1]) for k, v in self.dict.items())
def allitems(self):
return ((k, v) for k, vl in self.dict.items() for v in vl)
iterkeys = keys
itervalues = values
iteritems = items
iterallitems = allitems
else:
def values(self): return [v[-1] for v in self.dict.values()]
def items(self): return [(k, v[-1]) for k, v in self.dict.items()]
def iterkeys(self): return self.dict.iterkeys()
def itervalues(self): return (v[-1] for v in self.dict.itervalues())
def iteritems(self):
return ((k, v[-1]) for k, v in self.dict.iteritems())
def iterallitems(self):
return ((k, v) for k, vl in self.dict.iteritems() for v in vl)
def allitems(self):
return [(k, v) for k, vl in self.dict.iteritems() for v in vl]
def get(self, key, default=None, index=-1, type=None):
""" Return the most recent value for a key.
:param default: The default value to be returned if the key is not
present or the type conversion fails.
:param index: An index for the list of available values.
            :param type: If defined, this callable is used to cast the value
                into a specific type. Exceptions are suppressed and result in
                the default value being returned.
"""
try:
val = self.dict[key][index]
return type(val) if type else val
except Exception:
pass
return default
def append(self, key, value):
""" Add a new value to the list of values for this key. """
self.dict.setdefault(key, []).append(value)
def replace(self, key, value):
""" Replace the list of values with a single value. """
self.dict[key] = [value]
def getall(self, key):
""" Return a (possibly empty) list of values for a key. """
return self.dict.get(key) or []
#: Aliases for WTForms to mimic other multi-dict APIs (Django)
getone = get
getlist = getall
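# A minimal usage sketch for MultiDict (illustrative only; the _demo_*
# helper below is hypothetical and not part of the original module):
def _demo_multidict():  # pragma: no cover
    d = MultiDict(a=1)
    d.append('a', 2)                 # keep both values for the same key
    assert d['a'] == 2               # plain access returns the newest value
    assert d.getall('a') == [1, 2]   # the full history stays available
    assert d.get('a', index=0) == 1  # older values reachable via index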
class FormsDict(MultiDict):
""" This :class:`MultiDict` subclass is used to store request form data.
        In addition to the normal dict-like item access methods (which return
unmodified data as native strings), this container also supports
attribute-like access to its values. Attributes are automatically de-
or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing
attributes default to an empty string. """
#: Encoding used for attribute values.
input_encoding = 'utf8'
#: If true (default), unicode strings are first encoded with `latin1`
#: and then decoded to match :attr:`input_encoding`.
recode_unicode = True
def _fix(self, s, encoding=None):
if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI
return s.encode('latin1').decode(encoding or self.input_encoding)
elif isinstance(s, bytes): # Python 2 WSGI
return s.decode(encoding or self.input_encoding)
else:
return s
def decode(self, encoding=None):
""" Returns a copy with all keys and values de- or recoded to match
:attr:`input_encoding`. Some libraries (e.g. WTForms) want a
unicode dictionary. """
copy = FormsDict()
enc = copy.input_encoding = encoding or self.input_encoding
copy.recode_unicode = False
for key, value in self.allitems():
copy.append(self._fix(key, enc), self._fix(value, enc))
return copy
def getunicode(self, name, default=None, encoding=None):
""" Return the value as a unicode string, or the default. """
try:
return self._fix(self[name], encoding)
except (UnicodeError, KeyError):
return default
def __getattr__(self, name, default=unicode()):
# Without this guard, pickle generates a cryptic TypeError:
if name.startswith('__') and name.endswith('__'):
return super(FormsDict, self).__getattr__(name)
return self.getunicode(name, default=default)
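# Sketch of FormsDict attribute access and recoding (hypothetical helper;
# it only exercises the behavior documented in the class above):
def _demo_formsdict():  # pragma: no cover
    f = FormsDict()
    f['name'] = b'caf\xc3\xa9'.decode('latin1')  # utf8 bytes seen as latin1 (WSGI)
    assert f.name == u'caf\xe9'                  # attribute access re-decodes as utf8
    assert f.missing == u''                      # absent attributes default to ''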
class HeaderDict(MultiDict):
""" A case-insensitive version of :class:`MultiDict` that defaults to
replace the old value instead of appending it. """
def __init__(self, *a, **ka):
self.dict = {}
if a or ka: self.update(*a, **ka)
def __contains__(self, key): return _hkey(key) in self.dict
def __delitem__(self, key): del self.dict[_hkey(key)]
def __getitem__(self, key): return self.dict[_hkey(key)][-1]
def __setitem__(self, key, value): self.dict[_hkey(key)] = [str(value)]
def append(self, key, value):
self.dict.setdefault(_hkey(key), []).append(str(value))
def replace(self, key, value): self.dict[_hkey(key)] = [str(value)]
def getall(self, key): return self.dict.get(_hkey(key)) or []
def get(self, key, default=None, index=-1):
return MultiDict.get(self, _hkey(key), default, index)
def filter(self, names):
for name in [_hkey(n) for n in names]:
if name in self.dict:
del self.dict[name]
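# Sketch of HeaderDict semantics (hypothetical helper): keys are matched
# case-insensitively and assignment replaces instead of appending.
def _demo_headerdict():  # pragma: no cover
    h = HeaderDict()
    h['content-type'] = 'text/html'
    assert h['Content-Type'] == 'text/html'   # case-insensitive lookup
    h['Content-Type'] = 'text/plain'          # __setitem__ replaces the old value
    assert h.getall('content-type') == ['text/plain']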
class WSGIHeaderDict(DictMixin):
""" This dict-like class wraps a WSGI environ dict and provides convenient
access to HTTP_* fields. Keys and values are native strings
(2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI
environment contains non-native string values, these are de- or encoded
using a lossless 'latin1' character set.
The API will remain stable even on changes to the relevant PEPs.
Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one
that uses non-native strings.)
"""
#: List of keys that do not have a ``HTTP_`` prefix.
cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH')
def __init__(self, environ):
self.environ = environ
def _ekey(self, key):
""" Translate header field name to CGI/WSGI environ key. """
key = key.replace('-','_').upper()
if key in self.cgikeys:
return key
return 'HTTP_' + key
def raw(self, key, default=None):
""" Return the header value as is (may be bytes or unicode). """
return self.environ.get(self._ekey(key), default)
def __getitem__(self, key):
val = self.environ[self._ekey(key)]
if py3k:
if isinstance(val, unicode):
val = val.encode('latin1').decode('utf8')
else:
val = val.decode('utf8')
return val
def __setitem__(self, key, value):
raise TypeError("%s is read-only." % self.__class__)
def __delitem__(self, key):
raise TypeError("%s is read-only." % self.__class__)
def __iter__(self):
for key in self.environ:
if key[:5] == 'HTTP_':
yield _hkey(key[5:])
elif key in self.cgikeys:
yield _hkey(key)
def keys(self): return [x for x in self]
def __len__(self): return len(self.keys())
def __contains__(self, key): return self._ekey(key) in self.environ
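# Sketch of WSGIHeaderDict lookups against a raw WSGI environ
# (hypothetical helper; the environ values are made up):
def _demo_wsgiheaderdict():  # pragma: no cover
    env = {'HTTP_X_FORWARDED_FOR': '1.2.3.4', 'CONTENT_TYPE': 'text/plain'}
    h = WSGIHeaderDict(env)
    assert h['X-Forwarded-For'] == '1.2.3.4'  # HTTP_ prefix is added implicitly
    assert 'Content-Type' in h                # CGI keys carry no HTTP_ prefix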
class ConfigDict(dict):
""" A dict-like configuration storage with additional support for
namespaces, validators, meta-data, on_change listeners and more.
"""
__slots__ = ('_meta', '_on_change')
def __init__(self):
self._meta = {}
self._on_change = lambda name, value: None
def load_config(self, filename):
""" Load values from an ``*.ini`` style config file.
If the config file contains sections, their names are used as
namespaces for the values within. The two special sections
``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix).
"""
conf = ConfigParser()
conf.read(filename)
for section in conf.sections():
for key, value in conf.items(section):
if section not in ('DEFAULT', 'bottle'):
key = section + '.' + key
self[key] = value
return self
def load_dict(self, source, namespace=''):
""" Load values from a dictionary structure. Nesting can be used to
represent namespaces.
>>> c = ConfigDict()
>>> c.load_dict({'some': {'namespace': {'key': 'value'} } })
{'some.namespace.key': 'value'}
"""
for key, value in source.items():
if isinstance(key, str):
nskey = (namespace + '.' + key).strip('.')
if isinstance(value, dict):
self.load_dict(value, namespace=nskey)
else:
self[nskey] = value
else:
raise TypeError('Key has type %r (not a string)' % type(key))
return self
def update(self, *a, **ka):
""" If the first parameter is a string, all keys are prefixed with this
namespace. Apart from that it works just as the usual dict.update().
Example: ``update('some.namespace', key='value')`` """
prefix = ''
if a and isinstance(a[0], str):
prefix = a[0].strip('.') + '.'
a = a[1:]
for key, value in dict(*a, **ka).items():
self[prefix+key] = value
def setdefault(self, key, value):
if key not in self:
self[key] = value
return self[key]
def __setitem__(self, key, value):
if not isinstance(key, str):
raise TypeError('Key has type %r (not a string)' % type(key))
value = self.meta_get(key, 'filter', lambda x: x)(value)
if key in self and self[key] is value:
return
self._on_change(key, value)
dict.__setitem__(self, key, value)
def __delitem__(self, key):
self._on_change(key, None)
dict.__delitem__(self, key)
def meta_get(self, key, metafield, default=None):
""" Return the value of a meta field for a key. """
return self._meta.get(key, {}).get(metafield, default)
def meta_set(self, key, metafield, value):
""" Set the meta field for a key to a new value. This triggers the
on-change handler for existing keys. """
self._meta.setdefault(key, {})[metafield] = value
if key in self:
self[key] = self[key]
def meta_list(self, key):
""" Return an iterable of meta field names defined for a key. """
return self._meta.get(key, {}).keys()
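# Sketch of ConfigDict namespaces and meta data (hypothetical helper; the
# 'db.port' key and the int filter are made-up examples):
def _demo_configdict():  # pragma: no cover
    c = ConfigDict()
    c.load_dict({'db': {'host': 'localhost', 'port': '5432'}})
    assert c['db.port'] == '5432'         # nested dicts become dotted keys
    c.meta_set('db.port', 'filter', int)  # cast this key on every assignment
    c['db.port'] = '5432'
    assert c['db.port'] == 5432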
class AppStack(list):
""" A stack-like list. Calling it returns the head of the stack. """
def __call__(self):
""" Return the current default application. """
return self[-1]
def push(self, value=None):
""" Add a new :class:`Bottle` instance to the stack """
if not isinstance(value, Bottle):
value = Bottle()
self.append(value)
return value
class WSGIFileWrapper(object):
def __init__(self, fp, buffer_size=1024*64):
self.fp, self.buffer_size = fp, buffer_size
for attr in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'):
if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr))
def __iter__(self):
buff, read = self.buffer_size, self.read
while True:
part = read(buff)
if not part: return
yield part
class _closeiter(object):
""" This only exists to be able to attach a .close method to iterators that
do not support attribute assignment (most of itertools). """
def __init__(self, iterator, close=None):
self.iterator = iterator
self.close_callbacks = makelist(close)
def __iter__(self):
return iter(self.iterator)
def close(self):
for func in self.close_callbacks:
func()
class ResourceManager(object):
""" This class manages a list of search paths and helps to find and open
application-bound resources (files).
:param base: default value for :meth:`add_path` calls.
:param opener: callable used to open resources.
:param cachemode: controls which lookups are cached. One of 'all',
'found' or 'none'.
"""
def __init__(self, base='./', opener=open, cachemode='all'):
self.opener = opener
self.base = base
self.cachemode = cachemode
#: A list of search paths. See :meth:`add_path` for details.
self.path = []
#: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
self.cache = {}
def add_path(self, path, base=None, index=None, create=False):
""" Add a new path to the list of search paths. Return False if the
path does not exist.
:param path: The new search path. Relative paths are turned into
an absolute and normalized form. If the path looks like a file
(not ending in `/`), the filename is stripped off.
:param base: Path used to absolutize relative search paths.
Defaults to :attr:`base` which defaults to ``os.getcwd()``.
:param index: Position within the list of search paths. Defaults
to last index (appends to the list).
The `base` parameter makes it easy to reference files installed
along with a python module or package::
res.add_path('./resources/', __file__)
"""
base = os.path.abspath(os.path.dirname(base or self.base))
path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
path += os.sep
if path in self.path:
self.path.remove(path)
if create and not os.path.isdir(path):
os.makedirs(path)
if index is None:
self.path.append(path)
else:
self.path.insert(index, path)
self.cache.clear()
return os.path.exists(path)
def __iter__(self):
""" Iterate over all existing files in all registered paths. """
search = self.path[:]
while search:
path = search.pop()
if not os.path.isdir(path): continue
for name in os.listdir(path):
full = os.path.join(path, name)
if os.path.isdir(full): search.append(full)
else: yield full
def lookup(self, name):
""" Search for a resource and return an absolute file path, or `None`.
The :attr:`path` list is searched in order. The first match is
            returned. Symlinks are followed. The result is cached to speed up
future lookups. """
if name not in self.cache or DEBUG:
for path in self.path:
fpath = os.path.join(path, name)
if os.path.isfile(fpath):
if self.cachemode in ('all', 'found'):
self.cache[name] = fpath
return fpath
if self.cachemode == 'all':
self.cache[name] = None
return self.cache[name]
def open(self, name, mode='r', *args, **kwargs):
""" Find a resource and return a file object, or raise IOError. """
fname = self.lookup(name)
if not fname: raise IOError("Resource %r not found." % name)
return self.opener(fname, mode=mode, *args, **kwargs)
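# Sketch of a ResourceManager lookup (hypothetical helper; it builds a
# throw-away directory so no real search path is touched):
def _demo_resourcemanager():  # pragma: no cover
    import shutil, tempfile
    root = tempfile.mkdtemp()
    try:
        with open(os.path.join(root, 'app.conf'), 'w') as fp:
            fp.write('key = value')
        res = ResourceManager()
        res.add_path(root + os.sep)                # trailing sep marks a directory
        assert res.lookup('app.conf') is not None  # found and cached
        f = res.open('app.conf')
        try:
            assert f.read() == 'key = value'
        finally:
            f.close()
    finally:
        shutil.rmtree(root)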
class FileUpload(object):
def __init__(self, fileobj, name, filename, headers=None):
""" Wrapper for file uploads. """
#: Open file(-like) object (BytesIO buffer or temporary file)
self.file = fileobj
#: Name of the upload form field
self.name = name
#: Raw filename as sent by the client (may contain unsafe characters)
self.raw_filename = filename
#: A :class:`HeaderDict` with additional headers (e.g. content-type)
self.headers = HeaderDict(headers) if headers else HeaderDict()
content_type = HeaderProperty('Content-Type')
content_length = HeaderProperty('Content-Length', reader=int, default=-1)
@cached_property
def filename(self):
""" Name of the file on the client file system, but normalized to ensure
file system compatibility. An empty filename is returned as 'empty'.
Only ASCII letters, digits, dashes, underscores and dots are
allowed in the final filename. Accents are removed, if possible.
            Whitespace is replaced by a single dash. Leading or trailing dots
or dashes are removed. The filename is limited to 255 characters.
"""
fname = self.raw_filename
if not isinstance(fname, unicode):
fname = fname.decode('utf8', 'ignore')
fname = normalize('NFKD', fname).encode('ASCII', 'ignore').decode('ASCII')
fname = os.path.basename(fname.replace('\\', os.path.sep))
fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
return fname[:255] or 'empty'
def _copy_file(self, fp, chunk_size=2**16):
read, write, offset = self.file.read, fp.write, self.file.tell()
while 1:
buf = read(chunk_size)
if not buf: break
write(buf)
self.file.seek(offset)
def save(self, destination, overwrite=False, chunk_size=2**16):
""" Save file to disk or copy its content to an open file(-like) object.
If *destination* is a directory, :attr:`filename` is added to the
path. Existing files are not overwritten by default (IOError).
:param destination: File path, directory or file(-like) object.
:param overwrite: If True, replace existing files. (default: False)
:param chunk_size: Bytes to read at a time. (default: 64kb)
"""
if isinstance(destination, basestring): # Except file-likes here
if os.path.isdir(destination):
destination = os.path.join(destination, self.filename)
if not overwrite and os.path.exists(destination):
raise IOError('File exists.')
with open(destination, 'wb') as fp:
self._copy_file(fp, chunk_size)
else:
self._copy_file(destination, chunk_size)
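# Sketch of FileUpload filename sanitizing and copying (hypothetical helper;
# the form field and file name are made up):
def _demo_fileupload():  # pragma: no cover
    from io import BytesIO
    up = FileUpload(BytesIO(b'data'), 'field', '../we ird.TXT')
    assert up.filename == 'we-ird.TXT'  # path parts and whitespace normalized
    buf = BytesIO()
    up.save(buf)                        # file-like targets receive a copy
    assert buf.getvalue() == b'data'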
###############################################################################
# Application Helper ###########################################################
###############################################################################
def abort(code=500, text='Unknown Error.'):
""" Aborts execution and causes a HTTP error. """
raise HTTPError(code, text)
def redirect(url, code=None):
""" Aborts execution and causes a 303 or 302 redirect, depending on
the HTTP protocol version. """
if not code:
code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
res = response.copy(cls=HTTPResponse)
res.status = code
res.body = ""
res.set_header('Location', urljoin(request.url, url))
raise res
def _file_iter_range(fp, offset, bytes, maxread=1024*1024):
""" Yield chunks from a range in a file. No chunk is bigger than maxread."""
fp.seek(offset)
while bytes > 0:
part = fp.read(min(bytes, maxread))
if not part: break
bytes -= len(part)
yield part
def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'):
""" Open a file in a safe way and return :exc:`HTTPResponse` with status
        code 200, 206, 304, 403, 404 or 416. The ``Content-Type``, ``Content-Encoding``,
``Content-Length`` and ``Last-Modified`` headers are set if possible.
Special support for ``If-Modified-Since``, ``Range`` and ``HEAD``
requests.
:param filename: Name or path of the file to send.
:param root: Root path for file lookups. Should be an absolute directory
path.
:param mimetype: Defines the content-type header (default: guess from
file extension)
:param download: If True, ask the browser to open a `Save as...` dialog
instead of opening the file with the associated program. You can
specify a custom filename as a string. If not specified, the
original filename is used (default: False).
:param charset: The charset to use for files with a ``text/*``
mime-type. (default: UTF-8)
"""
root = os.path.abspath(root) + os.sep
filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
headers = dict()
if not filename.startswith(root):
return HTTPError(403, "Access denied.")
if not os.path.exists(filename) or not os.path.isfile(filename):
return HTTPError(404, "File does not exist.")
if not os.access(filename, os.R_OK):
return HTTPError(403, "You do not have permission to access this file.")
if mimetype == 'auto':
mimetype, encoding = mimetypes.guess_type(filename)
if encoding: headers['Content-Encoding'] = encoding
if mimetype:
if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype:
mimetype += '; charset=%s' % charset
headers['Content-Type'] = mimetype
if download:
download = os.path.basename(filename if download == True else download)
headers['Content-Disposition'] = 'attachment; filename="%s"' % download
stats = os.stat(filename)
headers['Content-Length'] = clen = stats.st_size
lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
headers['Last-Modified'] = lm
ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
if ims:
ims = parse_date(ims.split(";")[0].strip())
if ims is not None and ims >= int(stats.st_mtime):
headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
return HTTPResponse(status=304, **headers)
body = '' if request.method == 'HEAD' else open(filename, 'rb')
headers["Accept-Ranges"] = "bytes"
    if 'HTTP_RANGE' in request.environ:
        ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen))
if not ranges:
return HTTPError(416, "Requested Range Not Satisfiable")
offset, end = ranges[0]
headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen)
headers["Content-Length"] = str(end-offset)
if body: body = _file_iter_range(body, offset, end-offset)
return HTTPResponse(body, status=206, **headers)
return HTTPResponse(body, **headers)
###############################################################################
# HTTP Utilities and MISC (TODO) ###############################################
###############################################################################
def debug(mode=True):
""" Change the debug level.
There is only one debug level supported at the moment."""
global DEBUG
if mode: warnings.simplefilter('default')
DEBUG = bool(mode)
def http_date(value):
if isinstance(value, (datedate, datetime)):
value = value.utctimetuple()
elif isinstance(value, (int, float)):
value = time.gmtime(value)
if not isinstance(value, basestring):
value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
return value
def parse_date(ims):
""" Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
try:
ts = email.utils.parsedate_tz(ims)
return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone
except (TypeError, ValueError, IndexError, OverflowError):
return None
def parse_auth(header):
""" Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
try:
method, data = header.split(None, 1)
if method.lower() == 'basic':
user, pwd = touni(base64.b64decode(tob(data))).split(':',1)
return user, pwd
except (KeyError, ValueError):
return None
def parse_range_header(header, maxlen=0):
""" Yield (start, end) ranges parsed from a HTTP Range header. Skip
unsatisfiable ranges. The end index is non-inclusive."""
if not header or header[:6] != 'bytes=': return
ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r]
for start, end in ranges:
try:
if not start: # bytes=-100 -> last 100 bytes
start, end = max(0, maxlen-int(end)), maxlen
            elif not end: # bytes=100- -> all but the first 100 bytes
start, end = int(start), maxlen
else: # bytes=100-200 -> bytes 100-200 (inclusive)
start, end = int(start), min(int(end)+1, maxlen)
if 0 <= start < end <= maxlen:
yield start, end
except ValueError:
pass
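# Sketch of parse_range_header results (hypothetical helper; 1000 is an
# assumed resource length):
def _demo_parse_range_header():  # pragma: no cover
    assert list(parse_range_header('bytes=0-99', 1000)) == [(0, 100)]
    assert list(parse_range_header('bytes=-100', 1000)) == [(900, 1000)]
    assert list(parse_range_header('bytes=900-', 1000)) == [(900, 1000)]
    assert list(parse_range_header('bytes=5-1', 1000)) == []  # unsatisfiable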
def _parse_qsl(qs):
r = []
for pair in qs.replace(';','&').split('&'):
if not pair: continue
nv = pair.split('=', 1)
if len(nv) != 2: nv.append('')
key = urlunquote(nv[0].replace('+', ' '))
value = urlunquote(nv[1].replace('+', ' '))
r.append((key, value))
return r
def _lscmp(a, b):
""" Compares two strings in a cryptographically safe way:
Runtime is not affected by length of common prefix. """
return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
def cookie_encode(data, key):
""" Encode and sign a pickle-able object. Return a (byte) string """
msg = base64.b64encode(pickle.dumps(data, -1))
sig = base64.b64encode(hmac.new(tob(key), msg).digest())
return tob('!') + sig + tob('?') + msg
def cookie_decode(data, key):
""" Verify and decode an encoded string. Return an object or None."""
data = tob(data)
if cookie_is_encoded(data):
sig, msg = data.split(tob('?'), 1)
if _lscmp(sig[1:], base64.b64encode(hmac.new(tob(key), msg).digest())):
return pickle.loads(base64.b64decode(msg))
return None
def cookie_is_encoded(data):
""" Return True if the argument looks like a encoded cookie."""
return bool(data.startswith(tob('!')) and tob('?') in data)
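# Sketch of the signed-cookie round trip (hypothetical helper; 'secret'
# stands in for a real application key):
def _demo_cookie_roundtrip():  # pragma: no cover
    blob = cookie_encode({'user': 'alice'}, 'secret')
    assert cookie_is_encoded(blob)
    assert cookie_decode(blob, 'secret') == {'user': 'alice'}
    assert cookie_decode(blob, 'wrong') is None  # bad signature -> None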
def html_escape(string):
""" Escape HTML special characters ``&<>`` and quotes ``'"``. """
    return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\
                 .replace('"','&quot;').replace("'",'&#039;')
def html_quote(string):
""" Escape and quote a string to be used as an HTTP attribute."""
return '"%s"' % html_escape(string).replace('\n',' ')\
.replace('\r',' ').replace('\t','	')
def yieldroutes(func):
""" Return a generator for routes that match the signature (name, args)
of the func parameter. This may yield more than one route if the function
takes optional keyword arguments. The output is best described by example::
a() -> '/a'
b(x, y) -> '/b/<x>/<y>'
c(x, y=5) -> '/c/<x>' and '/c/<x>/<y>'
d(x=5, y=6) -> '/d' and '/d/<x>' and '/d/<x>/<y>'
"""
path = '/' + func.__name__.replace('__','/').lstrip('/')
spec = getargspec(func)
argc = len(spec[0]) - len(spec[3] or [])
path += ('/<%s>' * argc) % tuple(spec[0][:argc])
yield path
for arg in spec[0][argc:]:
path += '/<%s>' % arg
yield path
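# Sketch of the routes generated by yieldroutes (hypothetical helper,
# mirroring the docstring examples above):
def _demo_yieldroutes():  # pragma: no cover
    def c(x, y=5):
        pass
    assert list(yieldroutes(c)) == ['/c/<x>', '/c/<x>/<y>']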
def path_shift(script_name, path_info, shift=1):
""" Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
:return: The modified paths.
:param script_name: The SCRIPT_NAME path.
        :param path_info: The PATH_INFO path.
:param shift: The number of path fragments to shift. May be negative to
change the shift direction. (default: 1)
"""
if shift == 0: return script_name, path_info
pathlist = path_info.strip('/').split('/')
scriptlist = script_name.strip('/').split('/')
if pathlist and pathlist[0] == '': pathlist = []
if scriptlist and scriptlist[0] == '': scriptlist = []
if 0 < shift <= len(pathlist):
moved = pathlist[:shift]
scriptlist = scriptlist + moved
pathlist = pathlist[shift:]
elif 0 > shift >= -len(scriptlist):
moved = scriptlist[shift:]
pathlist = moved + pathlist
scriptlist = scriptlist[:shift]
else:
empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
raise AssertionError("Cannot shift. Nothing left from %s" % empty)
new_script_name = '/' + '/'.join(scriptlist)
new_path_info = '/' + '/'.join(pathlist)
if path_info.endswith('/') and pathlist: new_path_info += '/'
return new_script_name, new_path_info
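# Sketch of path_shift in both directions (hypothetical helper):
def _demo_path_shift():  # pragma: no cover
    assert path_shift('/a', '/b/c', shift=1) == ('/a/b', '/c')
    assert path_shift('/a/b', '/c', shift=-1) == ('/a', '/b/c')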
def auth_basic(check, realm="private", text="Access denied"):
""" Callback decorator to require HTTP auth (basic).
TODO: Add route(check_auth=...) parameter. """
def decorator(func):
@functools.wraps(func)
def wrapper(*a, **ka):
user, password = request.auth or (None, None)
if user is None or not check(user, password):
err = HTTPError(401, text)
err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
return err
return func(*a, **ka)
return wrapper
return decorator
# Shortcuts for common Bottle methods.
# They all refer to the current default application.
def make_default_app_wrapper(name):
""" Return a callable that relays calls to the current default app. """
@functools.wraps(getattr(Bottle, name))
def wrapper(*a, **ka):
return getattr(app(), name)(*a, **ka)
return wrapper
route = make_default_app_wrapper('route')
get = make_default_app_wrapper('get')
post = make_default_app_wrapper('post')
put = make_default_app_wrapper('put')
delete = make_default_app_wrapper('delete')
patch = make_default_app_wrapper('patch')
error = make_default_app_wrapper('error')
mount = make_default_app_wrapper('mount')
hook = make_default_app_wrapper('hook')
install = make_default_app_wrapper('install')
uninstall = make_default_app_wrapper('uninstall')
url = make_default_app_wrapper('get_url')
###############################################################################
# Server Adapter ###############################################################
###############################################################################
class ServerAdapter(object):
quiet = False
def __init__(self, host='127.0.0.1', port=8080, **options):
self.options = options
self.host = host
self.port = int(port)
def run(self, handler): # pragma: no cover
pass
def __repr__(self):
args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()])
return "%s(%s)" % (self.__class__.__name__, args)
class CGIServer(ServerAdapter):
quiet = True
def run(self, handler): # pragma: no cover
from wsgiref.handlers import CGIHandler
def fixed_environ(environ, start_response):
environ.setdefault('PATH_INFO', '')
return handler(environ, start_response)
CGIHandler().run(fixed_environ)
class FlupFCGIServer(ServerAdapter):
def run(self, handler): # pragma: no cover
import flup.server.fcgi
self.options.setdefault('bindAddress', (self.host, self.port))
flup.server.fcgi.WSGIServer(handler, **self.options).run()
class WSGIRefServer(ServerAdapter):
def run(self, app): # pragma: no cover
from wsgiref.simple_server import make_server
from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
import socket
class FixedHandler(WSGIRequestHandler):
def address_string(self): # Prevent reverse DNS lookups please.
return self.client_address[0]
def log_request(*args, **kw):
if not self.quiet:
return WSGIRequestHandler.log_request(*args, **kw)
handler_cls = self.options.get('handler_class', FixedHandler)
server_cls = self.options.get('server_class', WSGIServer)
if ':' in self.host: # Fix wsgiref for IPv6 addresses.
if getattr(server_cls, 'address_family') == socket.AF_INET:
class server_cls(server_cls):
address_family = socket.AF_INET6
self.srv = make_server(self.host, self.port, app, server_cls, handler_cls)
        self.port = self.srv.server_port # update to the actual port (0 means random)
try:
self.srv.serve_forever()
except KeyboardInterrupt:
self.srv.server_close() # Prevent ResourceWarning: unclosed socket
raise
class CherryPyServer(ServerAdapter):
def run(self, handler): # pragma: no cover
from cherrypy import wsgiserver
self.options['bind_addr'] = (self.host, self.port)
self.options['wsgi_app'] = handler
certfile = self.options.get('certfile')
if certfile:
del self.options['certfile']
keyfile = self.options.get('keyfile')
if keyfile:
del self.options['keyfile']
server = wsgiserver.CherryPyWSGIServer(**self.options)
if certfile:
server.ssl_certificate = certfile
if keyfile:
server.ssl_private_key = keyfile
try:
server.start()
finally:
server.stop()
class WaitressServer(ServerAdapter):
def run(self, handler):
from waitress import serve
serve(handler, host=self.host, port=self.port, _quiet=self.quiet)
class PasteServer(ServerAdapter):
def run(self, handler): # pragma: no cover
from paste import httpserver
from paste.translogger import TransLogger
handler = TransLogger(handler, setup_console_handler=(not self.quiet))
httpserver.serve(handler, host=self.host, port=str(self.port),
**self.options)
class MeinheldServer(ServerAdapter):
def run(self, handler):
from meinheld import server
server.listen((self.host, self.port))
server.run(handler)
class FapwsServer(ServerAdapter):
""" Extremely fast webserver using libev. See http://www.fapws.org/ """
def run(self, handler): # pragma: no cover
import fapws._evwsgi as evwsgi
from fapws import base, config
port = self.port
if float(config.SERVER_IDENT[-2:]) > 0.4:
# fapws3 silently changed its API in 0.5
port = str(port)
evwsgi.start(self.host, port)
# fapws3 never releases the GIL. Complain upstream. I tried. No luck.
if 'BOTTLE_CHILD' in os.environ and not self.quiet:
_stderr("WARNING: Auto-reloading does not work with Fapws3.\n")
_stderr(" (Fapws3 breaks python thread support)\n")
evwsgi.set_base_module(base)
def app(environ, start_response):
environ['wsgi.multiprocess'] = False
return handler(environ, start_response)
evwsgi.wsgi_cb(('', app))
evwsgi.run()
class TornadoServer(ServerAdapter):
""" The super hyped asynchronous server by facebook. Untested. """
def run(self, handler): # pragma: no cover
import tornado.wsgi, tornado.httpserver, tornado.ioloop
container = tornado.wsgi.WSGIContainer(handler)
server = tornado.httpserver.HTTPServer(container)
server.listen(port=self.port,address=self.host)
tornado.ioloop.IOLoop.instance().start()
class AppEngineServer(ServerAdapter):
""" Adapter for Google App Engine. """
quiet = True
def run(self, handler):
from google.appengine.ext.webapp import util
# A main() function in the handler script enables 'App Caching'.
        # Let's make sure it is there. This _really_ improves performance.
module = sys.modules.get('__main__')
if module and not hasattr(module, 'main'):
module.main = lambda: util.run_wsgi_app(handler)
util.run_wsgi_app(handler)
class TwistedServer(ServerAdapter):
""" Untested. """
def run(self, handler):
from twisted.web import server, wsgi
from twisted.python.threadpool import ThreadPool
from twisted.internet import reactor
thread_pool = ThreadPool()
thread_pool.start()
reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
reactor.listenTCP(self.port, factory, interface=self.host)
if not reactor.running:
reactor.run()
class DieselServer(ServerAdapter):
""" Untested. """
def run(self, handler):
from diesel.protocols.wsgi import WSGIApplication
app = WSGIApplication(handler, port=self.port)
app.run()
class GeventServer(ServerAdapter):
""" Untested. Options:
* `fast` (default: False) uses libevent's http server, but has some
issues: No streaming, no pipelining, no SSL.
* See gevent.wsgi.WSGIServer() documentation for more options.
"""
def run(self, handler):
from gevent import wsgi, pywsgi, local
if not isinstance(threading.local(), local.local):
msg = "Bottle requires gevent.monkey.patch_all() (before import)"
raise RuntimeError(msg)
if not self.options.pop('fast', None): wsgi = pywsgi
self.options['log'] = None if self.quiet else 'default'
address = (self.host, self.port)
server = wsgi.WSGIServer(address, handler, **self.options)
if 'BOTTLE_CHILD' in os.environ:
import signal
signal.signal(signal.SIGINT, lambda s, f: server.stop())
server.serve_forever()
class GeventSocketIOServer(ServerAdapter):
def run(self,handler):
from socketio import server
address = (self.host, self.port)
server.SocketIOServer(address, handler, **self.options).serve_forever()
class GunicornServer(ServerAdapter):
""" Untested. See http://gunicorn.org/configure.html for options. """
def run(self, handler):
from gunicorn.app.base import Application
config = {'bind': "%s:%d" % (self.host, int(self.port))}
config.update(self.options)
class GunicornApplication(Application):
def init(self, parser, opts, args):
return config
def load(self):
return handler
GunicornApplication().run()
class EventletServer(ServerAdapter):
""" Untested. Options:
* `backlog` adjust the eventlet backlog parameter which is the maximum
number of queued connections. Should be at least 1; the maximum
value is system-dependent.
* `family`: (default is 2) socket family, optional. See socket
documentation for available families.
"""
def run(self, handler):
from eventlet import wsgi, listen, patcher
if not patcher.is_monkey_patched(os):
msg = "Bottle requires eventlet.monkey_patch() (before import)"
raise RuntimeError(msg)
socket_args = {}
for arg in ('backlog', 'family'):
try:
socket_args[arg] = self.options.pop(arg)
except KeyError:
pass
address = (self.host, self.port)
try:
wsgi.server(listen(address, **socket_args), handler,
log_output=(not self.quiet))
except TypeError:
# Fallback, if we have old version of eventlet
wsgi.server(listen(address), handler)
class RocketServer(ServerAdapter):
""" Untested. """
def run(self, handler):
from rocket import Rocket
server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
server.start()
class BjoernServer(ServerAdapter):
""" Fast server written in C: https://github.com/jonashaag/bjoern """
def run(self, handler):
from bjoern import run
run(handler, self.host, self.port)
class AutoServer(ServerAdapter):
""" Untested. """
adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, WSGIRefServer]
def run(self, handler):
for sa in self.adapters:
try:
return sa(self.host, self.port, **self.options).run(handler)
except ImportError:
pass
server_names = {
'cgi': CGIServer,
'flup': FlupFCGIServer,
'wsgiref': WSGIRefServer,
'waitress': WaitressServer,
'cherrypy': CherryPyServer,
'paste': PasteServer,
'fapws3': FapwsServer,
'tornado': TornadoServer,
'gae': AppEngineServer,
'twisted': TwistedServer,
'diesel': DieselServer,
'meinheld': MeinheldServer,
'gunicorn': GunicornServer,
'eventlet': EventletServer,
'gevent': GeventServer,
'geventSocketIO':GeventSocketIOServer,
'rocket': RocketServer,
'bjoern' : BjoernServer,
'auto': AutoServer,
}
###############################################################################
# Application Control ##########################################################
###############################################################################
def load(target, **namespace):
""" Import a module or fetch an object from a module.
* ``package.module`` returns `module` as a module object.
* ``pack.mod:name`` returns the module variable `name` from `pack.mod`.
* ``pack.mod:func()`` calls `pack.mod.func()` and returns the result.
The last form accepts not only function calls, but any type of
expression. Keyword arguments passed to this function are available as
        local variables. Example: ``load('re:compile(x)', x='[a-z]')``
"""
module, target = target.split(":", 1) if ':' in target else (target, None)
if module not in sys.modules: __import__(module)
if not target: return sys.modules[module]
if target.isalnum(): return getattr(sys.modules[module], target)
package_name = module.split('.')[0]
namespace[package_name] = sys.modules[package_name]
return eval('%s.%s' % (module, target), namespace)
def load_app(target):
""" Load a bottle application from a module and make sure that the import
does not affect the current default application, but returns a separate
application object. See :func:`load` for the target parameter. """
global NORUN; NORUN, nr_old = True, NORUN
tmp = default_app.push() # Create a new "default application"
try:
rv = load(target) # Import the target module
return rv if callable(rv) else tmp
finally:
default_app.remove(tmp) # Remove the temporary added default application
NORUN = nr_old
_debug = debug
def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
interval=1, reloader=False, quiet=False, plugins=None,
debug=None, **kargs):
""" Start a server instance. This method blocks until the server terminates.
:param app: WSGI application or target string supported by
:func:`load_app`. (default: :func:`default_app`)
:param server: Server adapter to use. See :data:`server_names` keys
for valid names or pass a :class:`ServerAdapter` subclass.
(default: `wsgiref`)
        :param host: Server address to bind to. Pass ``0.0.0.0`` to listen on
all interfaces including the external one. (default: 127.0.0.1)
:param port: Server port to bind to. Values below 1024 require root
privileges. (default: 8080)
:param reloader: Start auto-reloading server? (default: False)
:param interval: Auto-reloader interval in seconds (default: 1)
:param quiet: Suppress output to stdout and stderr? (default: False)
:param options: Options passed to the server adapter.
"""
if NORUN: return
if reloader and not os.environ.get('BOTTLE_CHILD'):
lockfile = None
try:
fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
os.close(fd) # We only need this file to exist. We never write to it
while os.path.exists(lockfile):
args = [sys.executable] + sys.argv
environ = os.environ.copy()
environ['BOTTLE_CHILD'] = 'true'
environ['BOTTLE_LOCKFILE'] = lockfile
p = subprocess.Popen(args, env=environ)
while p.poll() is None: # Busy wait...
os.utime(lockfile, None) # I am alive!
time.sleep(interval)
if p.poll() != 3:
if os.path.exists(lockfile): os.unlink(lockfile)
sys.exit(p.poll())
except KeyboardInterrupt:
pass
finally:
if os.path.exists(lockfile):
os.unlink(lockfile)
return
try:
if debug is not None: _debug(debug)
app = app or default_app()
if isinstance(app, basestring):
app = load_app(app)
if not callable(app):
raise ValueError("Application is not callable: %r" % app)
for plugin in plugins or []:
if isinstance(plugin, basestring):
plugin = load(plugin)
app.install(plugin)
if server in server_names:
server = server_names.get(server)
if isinstance(server, basestring):
server = load(server)
if isinstance(server, type):
server = server(host=host, port=port, **kargs)
if not isinstance(server, ServerAdapter):
raise ValueError("Unknown or unsupported server: %r" % server)
server.quiet = server.quiet or quiet
if not server.quiet:
_stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server)))
_stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
_stderr("Hit Ctrl-C to quit.\n\n")
if reloader:
lockfile = os.environ.get('BOTTLE_LOCKFILE')
bgcheck = FileCheckerThread(lockfile, interval)
with bgcheck:
server.run(app)
if bgcheck.status == 'reload':
sys.exit(3)
else:
server.run(app)
except KeyboardInterrupt:
pass
except (SystemExit, MemoryError):
raise
except:
if not reloader: raise
if not getattr(server, 'quiet', quiet):
print_exc()
time.sleep(interval)
sys.exit(3)
class FileCheckerThread(threading.Thread):
""" Interrupt main-thread as soon as a changed module file is detected,
the lockfile gets deleted or gets to old. """
def __init__(self, lockfile, interval):
threading.Thread.__init__(self)
self.daemon = True
self.lockfile, self.interval = lockfile, interval
#: Is one of 'reload', 'error' or 'exit'
self.status = None
def run(self):
exists = os.path.exists
mtime = lambda p: os.stat(p).st_mtime
files = dict()
for module in list(sys.modules.values()):
path = getattr(module, '__file__', '')
if path[-4:] in ('.pyo', '.pyc'): path = path[:-1]
if path and exists(path): files[path] = mtime(path)
while not self.status:
if not exists(self.lockfile)\
or mtime(self.lockfile) < time.time() - self.interval - 5:
self.status = 'error'
thread.interrupt_main()
for path, lmtime in list(files.items()):
if not exists(path) or mtime(path) > lmtime:
self.status = 'reload'
thread.interrupt_main()
break
time.sleep(self.interval)
def __enter__(self):
self.start()
def __exit__(self, exc_type, *_):
if not self.status: self.status = 'exit' # silent exit
self.join()
return exc_type is not None and issubclass(exc_type, KeyboardInterrupt)
###############################################################################
# Template Adapters ############################################################
###############################################################################
class TemplateError(HTTPError):
def __init__(self, message):
HTTPError.__init__(self, 500, message)
class BaseTemplate(object):
""" Base class and minimal API for template adapters """
extensions = ['tpl','html','thtml','stpl']
settings = {} #used in prepare()
defaults = {} #used in render()
def __init__(self, source=None, name=None, lookup=None, encoding='utf8', **settings):
""" Create a new template.
If the source parameter (str or buffer) is missing, the name argument
is used to guess a template filename. Subclasses can assume that
self.source and/or self.filename are set. Both are strings.
The lookup, encoding and settings parameters are stored as instance
variables.
The lookup parameter stores a list containing directory paths.
The encoding parameter should be used to decode byte strings or files.
The settings parameter contains a dict for engine-specific settings.
"""
self.name = name
self.source = source.read() if hasattr(source, 'read') else source
self.filename = source.filename if hasattr(source, 'filename') else None
self.lookup = [os.path.abspath(x) for x in lookup] if lookup else []
self.encoding = encoding
self.settings = self.settings.copy() # Copy from class variable
self.settings.update(settings) # Apply
if not self.source and self.name:
self.filename = self.search(self.name, self.lookup)
if not self.filename:
raise TemplateError('Template %s not found.' % repr(name))
if not self.source and not self.filename:
raise TemplateError('No template specified.')
self.prepare(**self.settings)
@classmethod
def search(cls, name, lookup=None):
""" Search name in all directories specified in lookup.
First without, then with common extensions. Return first hit. """
if not lookup:
depr('The template lookup path list should not be empty.', True) #0.12
lookup = ['.']
if os.path.isabs(name) and os.path.isfile(name):
depr('Absolute template path names are deprecated.', True) #0.12
return os.path.abspath(name)
for spath in lookup:
spath = os.path.abspath(spath) + os.sep
fname = os.path.abspath(os.path.join(spath, name))
if not fname.startswith(spath): continue
if os.path.isfile(fname): return fname
for ext in cls.extensions:
if os.path.isfile('%s.%s' % (fname, ext)):
return '%s.%s' % (fname, ext)
@classmethod
def global_config(cls, key, *args):
""" This reads or sets the global settings stored in class.settings. """
if args:
cls.settings = cls.settings.copy() # Make settings local to class
cls.settings[key] = args[0]
else:
return cls.settings[key]
def prepare(self, **options):
""" Run preparations (parsing, caching, ...).
It should be possible to call this again to refresh a template or to
update settings.
"""
raise NotImplementedError
def render(self, *args, **kwargs):
""" Render the template with the specified local variables and return
a single byte or unicode string. If it is a byte string, the encoding
must match self.encoding. This method must be thread-safe!
Local variables may be provided in dictionaries (args)
or directly, as keywords (kwargs).
"""
raise NotImplementedError
class MakoTemplate(BaseTemplate):
def prepare(self, **options):
from mako.template import Template
from mako.lookup import TemplateLookup
options.update({'input_encoding':self.encoding})
options.setdefault('format_exceptions', bool(DEBUG))
lookup = TemplateLookup(directories=self.lookup, **options)
if self.source:
self.tpl = Template(self.source, lookup=lookup, **options)
else:
self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options)
def render(self, *args, **kwargs):
for dictarg in args: kwargs.update(dictarg)
_defaults = self.defaults.copy()
_defaults.update(kwargs)
return self.tpl.render(**_defaults)
class CheetahTemplate(BaseTemplate):
def prepare(self, **options):
from Cheetah.Template import Template
self.context = threading.local()
self.context.vars = {}
options['searchList'] = [self.context.vars]
if self.source:
self.tpl = Template(source=self.source, **options)
else:
self.tpl = Template(file=self.filename, **options)
def render(self, *args, **kwargs):
for dictarg in args: kwargs.update(dictarg)
self.context.vars.update(self.defaults)
self.context.vars.update(kwargs)
out = str(self.tpl)
self.context.vars.clear()
return out
class Jinja2Template(BaseTemplate):
def prepare(self, filters=None, tests=None, globals={}, **kwargs):
from jinja2 import Environment, FunctionLoader
self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
if filters: self.env.filters.update(filters)
if tests: self.env.tests.update(tests)
if globals: self.env.globals.update(globals)
if self.source:
self.tpl = self.env.from_string(self.source)
else:
self.tpl = self.env.get_template(self.filename)
def render(self, *args, **kwargs):
for dictarg in args: kwargs.update(dictarg)
_defaults = self.defaults.copy()
_defaults.update(kwargs)
return self.tpl.render(**_defaults)
def loader(self, name):
fname = self.search(name, self.lookup)
if not fname: return
with open(fname, "rb") as f:
return f.read().decode(self.encoding)
class SimpleTemplate(BaseTemplate):
def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka):
self.cache = {}
enc = self.encoding
self._str = lambda x: touni(x, enc)
self._escape = lambda x: escape_func(touni(x, enc))
self.syntax = syntax
if noescape:
self._str, self._escape = self._escape, self._str
@cached_property
def co(self):
return compile(self.code, self.filename or '<string>', 'exec')
@cached_property
def code(self):
source = self.source
if not source:
with open(self.filename, 'rb') as f:
source = f.read()
try:
source, encoding = touni(source), 'utf8'
except UnicodeError:
depr('Template encodings other than utf8 are no longer supported.') #0.11
source, encoding = touni(source, 'latin1'), 'latin1'
parser = StplParser(source, encoding=encoding, syntax=self.syntax)
code = parser.translate()
self.encoding = parser.encoding
return code
def _rebase(self, _env, _name=None, **kwargs):
_env['_rebase'] = (_name, kwargs)
def _include(self, _env, _name=None, **kwargs):
env = _env.copy()
env.update(kwargs)
if _name not in self.cache:
self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
return self.cache[_name].execute(env['_stdout'], env)
def execute(self, _stdout, kwargs):
env = self.defaults.copy()
env.update(kwargs)
env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
'include': functools.partial(self._include, env),
'rebase': functools.partial(self._rebase, env), '_rebase': None,
'_str': self._str, '_escape': self._escape, 'get': env.get,
'setdefault': env.setdefault, 'defined': env.__contains__ })
eval(self.co, env)
if env.get('_rebase'):
subtpl, rargs = env.pop('_rebase')
rargs['base'] = ''.join(_stdout) #copy stdout
del _stdout[:] # clear stdout
return self._include(env, subtpl, **rargs)
return env
def render(self, *args, **kwargs):
""" Render the template using keyword arguments as local variables. """
env = {}; stdout = []
for dictarg in args: env.update(dictarg)
env.update(kwargs)
self.execute(stdout, env)
return ''.join(stdout)
class StplSyntaxError(TemplateError): pass
class StplParser(object):
""" Parser for stpl templates. """
_re_cache = {} #: Cache for compiled re patterns
# This huge pile of voodoo magic splits python code into 8 different tokens.
# 1: All kinds of python strings (trust me, it works)
_re_tok = '((?m)[urbURB]?(?:\'\'(?!\')|""(?!")|\'{6}|"{6}' \
'|\'(?:[^\\\\\']|\\\\.)+?\'|"(?:[^\\\\"]|\\\\.)+?"' \
'|\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}' \
'|"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}))'
_re_inl = _re_tok.replace('|\\n','') # We re-use this string pattern later
# 2: Comments (until end of line, but not the newline itself)
_re_tok += '|(#.*)'
# 3,4: Keywords that start or continue a python block (only start of line)
_re_tok += '|^([ \\t]*(?:if|for|while|with|try|def|class)\\b)' \
'|^([ \\t]*(?:elif|else|except|finally)\\b)'
# 5: Our special 'end' keyword (but only if it stands alone)
_re_tok += '|((?:^|;)[ \\t]*end[ \\t]*(?=(?:%(block_close)s[ \\t]*)?\\r?$|;|#))'
# 6: A customizable end-of-code-block template token (only end of line)
_re_tok += '|(%(block_close)s[ \\t]*(?=$))'
# 7: And finally, a single newline. The 8th token is 'everything else'
_re_tok += '|(\\r?\\n)'
# Match the start tokens of code areas in a template
_re_split = '(?m)^[ \t]*(\\\\?)((%(line_start)s)|(%(block_start)s))'
# Match inline statements (may contain python strings)
_re_inl = '%%(inline_start)s((?:%s|[^\'"\n]+?)*?)%%(inline_end)s' % _re_inl
default_syntax = '<% %> % {{ }}'
def __init__(self, source, syntax=None, encoding='utf8'):
self.source, self.encoding = touni(source, encoding), encoding
self.set_syntax(syntax or self.default_syntax)
self.code_buffer, self.text_buffer = [], []
self.lineno, self.offset = 1, 0
self.indent, self.indent_mod = 0, 0
def get_syntax(self):
""" Tokens as a space separated string (default: <% %> % {{ }}) """
return self._syntax
def set_syntax(self, syntax):
self._syntax = syntax
self._tokens = syntax.split()
if not syntax in self._re_cache:
names = 'block_start block_close line_start inline_start inline_end'
etokens = map(re.escape, self._tokens)
pattern_vars = dict(zip(names.split(), etokens))
patterns = (self._re_split, self._re_tok, self._re_inl)
patterns = [re.compile(p%pattern_vars) for p in patterns]
self._re_cache[syntax] = patterns
self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax]
syntax = property(get_syntax, set_syntax)
def translate(self):
if self.offset: raise RuntimeError('Parser is a one time instance.')
while True:
m = self.re_split.search(self.source[self.offset:])
if m:
text = self.source[self.offset:self.offset+m.start()]
self.text_buffer.append(text)
offs = self.offset
self.offset += m.end()
if m.group(1): # Escape syntax
line, sep, _ = self.source[self.offset:].partition('\n')
self.text_buffer.append(self.source[offs+m.start():offs+m.start(1)]+m.group(2)+line+sep)
self.offset += len(line+sep)
continue
self.flush_text()
self.read_code(multiline=bool(m.group(4)))
else: break
self.text_buffer.append(self.source[self.offset:])
self.flush_text()
return ''.join(self.code_buffer)
def read_code(self, multiline):
code_line, comment = '', ''
while True:
m = self.re_tok.search(self.source[self.offset:])
if not m:
code_line += self.source[self.offset:]
self.offset = len(self.source)
self.write_code(code_line.strip(), comment)
return
code_line += self.source[self.offset:self.offset+m.start()]
self.offset += m.end()
_str, _com, _blk1, _blk2, _end, _cend, _nl = m.groups()
if code_line and (_blk1 or _blk2): # a if b else c
code_line += _blk1 or _blk2
continue
if _str: # Python string
code_line += _str
elif _com: # Python comment (up to EOL)
comment = _com
if multiline and _com.strip().endswith(self._tokens[1]):
multiline = False # Allow end-of-block in comments
elif _blk1: # Start-block keyword (if/for/while/def/try/...)
code_line, self.indent_mod = _blk1, -1
self.indent += 1
elif _blk2: # Continue-block keyword (else/elif/except/...)
code_line, self.indent_mod = _blk2, -1
elif _end: # The non-standard 'end'-keyword (ends a block)
self.indent -= 1
elif _cend: # The end-code-block template token (usually '%>')
if multiline: multiline = False
else: code_line += _cend
else: # \n
self.write_code(code_line.strip(), comment)
self.lineno += 1
code_line, comment, self.indent_mod = '', '', 0
if not multiline:
break
def flush_text(self):
text = ''.join(self.text_buffer)
del self.text_buffer[:]
if not text: return
parts, pos, nl = [], 0, '\\\n'+' '*self.indent
for m in self.re_inl.finditer(text):
prefix, pos = text[pos:m.start()], m.end()
if prefix:
parts.append(nl.join(map(repr, prefix.splitlines(True))))
if prefix.endswith('\n'): parts[-1] += nl
parts.append(self.process_inline(m.group(1).strip()))
if pos < len(text):
prefix = text[pos:]
lines = prefix.splitlines(True)
if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3]
elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4]
parts.append(nl.join(map(repr, lines)))
code = '_printlist((%s,))' % ', '.join(parts)
self.lineno += code.count('\n')+1
self.write_code(code)
@staticmethod
def process_inline(chunk):
if chunk[0] == '!': return '_str(%s)' % chunk[1:]
return '_escape(%s)' % chunk
def write_code(self, line, comment=''):
code = ' ' * (self.indent+self.indent_mod)
code += line.lstrip() + comment + '\n'
self.code_buffer.append(code)
def template(*args, **kwargs):
"""
    Get a rendered template as a string.
You can use a name, a filename or a template string as first parameter.
Template rendering arguments can be passed as dictionaries
or directly (as keyword arguments).
"""
tpl = args[0] if args else None
adapter = kwargs.pop('template_adapter', SimpleTemplate)
lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
tplid = (id(lookup), tpl)
if tplid not in TEMPLATES or DEBUG:
settings = kwargs.pop('template_settings', {})
if isinstance(tpl, adapter):
TEMPLATES[tplid] = tpl
if settings: TEMPLATES[tplid].prepare(**settings)
elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings)
else:
TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings)
if not TEMPLATES[tplid]:
abort(500, 'Template (%s) not found' % tpl)
for dictarg in args[1:]: kwargs.update(dictarg)
return TEMPLATES[tplid].render(kwargs)
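# Sketch of the template() shortcut with an inline SimpleTemplate source
# (hypothetical helper; a string containing '{{' is treated as template
# source rather than a file name):
def _demo_template():  # pragma: no cover
    assert template('Hello {{name}}!', name='World') == 'Hello World!'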
mako_template = functools.partial(template, template_adapter=MakoTemplate)
cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
def view(tpl_name, **defaults):
""" Decorator: renders a template for a handler.
        The handler can control its behavior like this:
- return a dict of template vars to fill out the template
- return something other than a dict and the view decorator will not
process the template, but return the handler result as is.
This includes returning a HTTPResponse(dict) to get,
for instance, JSON with autojson or other castfilters.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
result = func(*args, **kwargs)
if isinstance(result, (dict, DictMixin)):
tplvars = defaults.copy()
tplvars.update(result)
return template(tpl_name, **tplvars)
elif result is None:
return template(tpl_name, defaults)
return result
return wrapper
return decorator
mako_view = functools.partial(view, template_adapter=MakoTemplate)
cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
###############################################################################
# Constants and Globals ########################################################
###############################################################################
TEMPLATE_PATH = ['./', './views/']
TEMPLATES = {}
DEBUG = False
NORUN = False # If set, run() does nothing. Used by load_app()
#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found')
HTTP_CODES = httplib.responses
HTTP_CODES[418] = "I'm a teapot" # RFC 2324
HTTP_CODES[428] = "Precondition Required"
HTTP_CODES[429] = "Too Many Requests"
HTTP_CODES[431] = "Request Header Fields Too Large"
HTTP_CODES[511] = "Network Authentication Required"
_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items())
#: The default template used for error pages. Override with @error()
ERROR_PAGE_TEMPLATE = """
%%try:
%%from %s import DEBUG, request
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
<html>
<head>
<title>Error: {{e.status}}</title>
<style type="text/css">
html {background-color: #eee; font-family: sans-serif;}
body {background-color: #fff; border: 1px solid #ddd;
padding: 15px; margin: 15px;}
pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
</style>
</head>
<body>
<h1>Error: {{e.status}}</h1>
<p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt>
caused an error:</p>
<pre>{{e.body}}</pre>
%%if DEBUG and e.exception:
<h2>Exception:</h2>
<pre>{{repr(e.exception)}}</pre>
%%end
%%if DEBUG and e.traceback:
<h2>Traceback:</h2>
<pre>{{e.traceback}}</pre>
%%end
</body>
</html>
%%except ImportError:
<b>ImportError:</b> Could not generate the error page. Please add bottle to
the import path.
%%end
""" % __name__
#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a
#: request callback, this instance always refers to the *current* request
#: (even on a multithreaded server).
request = LocalRequest()
#: A thread-safe instance of :class:`LocalResponse`. It is used to change the
#: HTTP response for the *current* request.
response = LocalResponse()
#: A thread-safe namespace. Not used by Bottle.
local = threading.local()
# Initialize app stack (create first empty Bottle app)
# BC: 0.6.4 and needed for run()
app = default_app = AppStack()
app.push()
#: A virtual package that redirects import statements.
#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`.
ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module
if __name__ == '__main__':
opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
if opt.version:
_stdout('Bottle %s\n'%__version__)
sys.exit(0)
if not args:
parser.print_help()
_stderr('\nError: No application entry point specified.\n')
sys.exit(1)
sys.path.insert(0, '.')
sys.modules.setdefault('bottle', sys.modules['__main__'])
host, port = (opt.bind or 'localhost'), 8080
if ':' in host and host.rfind(']') < host.rfind(':'):
host, port = host.rsplit(':', 1)
host = host.strip('[]')
run(args[0], host=host, port=int(port), server=opt.server,
reloader=opt.reload, plugins=opt.plugin, debug=opt.debug)
# THE END
| mit | 5,009,132,559,782,079,000 | 38.502477 | 108 | 0.580326 | false |
with-git/tensorflow | tensorflow/contrib/grid_rnn/python/ops/grid_rnn_cell.py | 76 | 23199 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module for constructing GridRNN cells"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import functools
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.contrib import layers
from tensorflow.contrib import rnn
class GridRNNCell(rnn.RNNCell):
"""Grid recurrent cell.
This implementation is based on:
http://arxiv.org/pdf/1507.01526v3.pdf
  This is the generic implementation of GridRNN. Users can specify an
  arbitrary number of dimensions,
set some of them to be priority (section 3.2), non-recurrent (section 3.3)
and input/output dimensions (section 3.4).
Weight sharing can also be specified using the `tied` parameter.
Type of recurrent units can be specified via `cell_fn`.
"""
def __init__(self,
num_units,
num_dims=1,
input_dims=None,
output_dims=None,
priority_dims=None,
non_recurrent_dims=None,
tied=False,
cell_fn=None,
non_recurrent_fn=None,
state_is_tuple=True,
output_is_tuple=True):
"""Initialize the parameters of a Grid RNN cell
Args:
num_units: int, The number of units in all dimensions of this GridRNN cell
num_dims: int, Number of dimensions of this grid.
input_dims: int or list, List of dimensions which will receive input data.
output_dims: int or list, List of dimensions from which the output will be
recorded.
priority_dims: int or list, List of dimensions to be considered as
priority dimensions.
If None, no dimension is prioritized.
non_recurrent_dims: int or list, List of dimensions that are not
recurrent.
The transfer function for non-recurrent dimensions is specified
        via `non_recurrent_fn`, which defaults to `tensorflow.nn.relu`.
tied: bool, Whether to share the weights among the dimensions of this
GridRNN cell.
If there are non-recurrent dimensions in the grid, weights are
shared between each group of recurrent and non-recurrent
dimensions.
cell_fn: function, a function which returns the recurrent cell object.
Has to be in the following signature:
```
def cell_func(num_units):
# ...
```
and returns an object of type `RNNCell`. If None, LSTMCell with
default parameters will be used.
Note that if you use a custom RNNCell (with `cell_fn`), it is your
responsibility to make sure the inner cell use `state_is_tuple=True`.
non_recurrent_fn: a tensorflow Op that will be the transfer function of
the non-recurrent dimensions
state_is_tuple: If True, accepted and returned states are tuples of the
states of the recurrent dimensions. If False, they are concatenated
along the column axis. The latter behavior will soon be deprecated.
Note that if you use a custom RNNCell (with `cell_fn`), it is your
responsibility to make sure the inner cell use `state_is_tuple=True`.
output_is_tuple: If True, the output is a tuple of the outputs of the
recurrent dimensions. If False, they are concatenated along the
        column axis. The latter behavior will soon be deprecated.
Raises:
TypeError: if cell_fn does not return an RNNCell instance.
"""
if not state_is_tuple:
logging.warning('%s: Using a concatenated state is slower and will '
'soon be deprecated. Use state_is_tuple=True.', self)
if not output_is_tuple:
logging.warning('%s: Using a concatenated output is slower and will'
'soon be deprecated. Use output_is_tuple=True.', self)
if num_dims < 1:
raise ValueError('dims must be >= 1: {}'.format(num_dims))
self._config = _parse_rnn_config(num_dims, input_dims, output_dims,
priority_dims, non_recurrent_dims,
non_recurrent_fn or nn.relu, tied,
num_units)
self._state_is_tuple = state_is_tuple
self._output_is_tuple = output_is_tuple
if cell_fn is None:
my_cell_fn = functools.partial(
rnn.LSTMCell, num_units=num_units, state_is_tuple=state_is_tuple)
else:
my_cell_fn = lambda: cell_fn(num_units)
if tied:
self._cells = [my_cell_fn()] * num_dims
else:
self._cells = [my_cell_fn() for _ in range(num_dims)]
if not isinstance(self._cells[0], rnn.RNNCell):
raise TypeError('cell_fn must return an RNNCell instance, saw: %s' %
type(self._cells[0]))
if self._output_is_tuple:
self._output_size = tuple(self._cells[0].output_size
for _ in self._config.outputs)
else:
self._output_size = self._cells[0].output_size * len(self._config.outputs)
if self._state_is_tuple:
self._state_size = tuple(self._cells[0].state_size
for _ in self._config.recurrents)
else:
self._state_size = self._cell_state_size() * len(self._config.recurrents)
@property
def output_size(self):
return self._output_size
@property
def state_size(self):
return self._state_size
def __call__(self, inputs, state, scope=None):
"""Run one step of GridRNN.
Args:
inputs: input Tensor, 2D, batch x input_size. Or None
state: state Tensor, 2D, batch x state_size. Note that state_size =
cell_state_size * recurrent_dims
scope: VariableScope for the created subgraph; defaults to "GridRNNCell".
Returns:
A tuple containing:
- A 2D, batch x output_size, Tensor representing the output of the cell
after reading "inputs" when previous state was "state".
- A 2D, batch x state_size, Tensor representing the new state of the cell
after reading "inputs" when previous state was "state".
"""
conf = self._config
dtype = inputs.dtype
c_prev, m_prev, cell_output_size = self._extract_states(state)
new_output = [None] * conf.num_dims
new_state = [None] * conf.num_dims
with vs.variable_scope(scope or type(self).__name__): # GridRNNCell
# project input, populate c_prev and m_prev
self._project_input(inputs, c_prev, m_prev, cell_output_size > 0)
# propagate along dimensions, first for non-priority dimensions
# then priority dimensions
_propagate(conf.non_priority, conf, self._cells, c_prev, m_prev,
new_output, new_state, True)
_propagate(conf.priority, conf, self._cells,
c_prev, m_prev, new_output, new_state, False)
# collect outputs and states
output_tensors = [new_output[i] for i in self._config.outputs]
if self._output_is_tuple:
output = tuple(output_tensors)
else:
if output_tensors:
output = array_ops.concat(output_tensors, 1)
else:
output = array_ops.zeros([0, 0], dtype)
if self._state_is_tuple:
states = tuple(new_state[i] for i in self._config.recurrents)
else:
# concat each state first, then flatten the whole thing
state_tensors = [
x for i in self._config.recurrents for x in new_state[i]
]
if state_tensors:
states = array_ops.concat(state_tensors, 1)
else:
states = array_ops.zeros([0, 0], dtype)
return output, states
def _extract_states(self, state):
"""Extract the cell and previous output tensors from the given state.
Args:
state: The RNN state.
Returns:
Tuple of the cell value, previous output, and cell_output_size.
Raises:
ValueError: If len(self._config.recurrents) != len(state).
"""
conf = self._config
# c_prev is `m` (cell value), and
# m_prev is `h` (previous output) in the paper.
# Keeping c and m here for consistency with the codebase
c_prev = [None] * conf.num_dims
m_prev = [None] * conf.num_dims
# for LSTM : state = memory cell + output, hence cell_output_size > 0
# for GRU/RNN: state = output (whose size is equal to _num_units),
# hence cell_output_size = 0
total_cell_state_size = self._cell_state_size()
cell_output_size = total_cell_state_size - conf.num_units
if self._state_is_tuple:
if len(conf.recurrents) != len(state):
raise ValueError('Expected state as a tuple of {} '
                         'elements'.format(len(conf.recurrents)))
for recurrent_dim, recurrent_state in zip(conf.recurrents, state):
if cell_output_size > 0:
c_prev[recurrent_dim], m_prev[recurrent_dim] = recurrent_state
else:
m_prev[recurrent_dim] = recurrent_state
else:
for recurrent_dim, start_idx in zip(conf.recurrents,
range(0, self.state_size,
total_cell_state_size)):
if cell_output_size > 0:
c_prev[recurrent_dim] = array_ops.slice(state, [0, start_idx],
[-1, conf.num_units])
m_prev[recurrent_dim] = array_ops.slice(
state, [0, start_idx + conf.num_units], [-1, cell_output_size])
else:
m_prev[recurrent_dim] = array_ops.slice(state, [0, start_idx],
[-1, conf.num_units])
return c_prev, m_prev, cell_output_size
def _project_input(self, inputs, c_prev, m_prev, with_c):
"""Fills in c_prev and m_prev with projected input, for input dimensions.
Args:
inputs: inputs tensor
c_prev: cell value
m_prev: previous output
with_c: boolean; whether to include project_c.
Raises:
ValueError: if len(self._config.input) != len(inputs)
"""
conf = self._config
if (inputs is not None and inputs.get_shape().with_rank(2)[1].value > 0 and
conf.inputs):
if isinstance(inputs, tuple):
if len(conf.inputs) != len(inputs):
          raise ValueError('Expected inputs as a tuple of {} '
'tensors'.format(len(conf.inputs)))
input_splits = inputs
else:
input_splits = array_ops.split(
value=inputs, num_or_size_splits=len(conf.inputs), axis=1)
input_sz = input_splits[0].get_shape().with_rank(2)[1].value
for i, j in enumerate(conf.inputs):
input_project_m = vs.get_variable(
'project_m_{}'.format(j), [input_sz, conf.num_units],
dtype=inputs.dtype)
m_prev[j] = math_ops.matmul(input_splits[i], input_project_m)
if with_c:
input_project_c = vs.get_variable(
'project_c_{}'.format(j), [input_sz, conf.num_units],
dtype=inputs.dtype)
c_prev[j] = math_ops.matmul(input_splits[i], input_project_c)
def _cell_state_size(self):
"""Total size of the state of the inner cell used in this grid.
Returns:
Total size of the state of the inner cell.
"""
state_sizes = self._cells[0].state_size
if isinstance(state_sizes, tuple):
return sum(state_sizes)
    return state_sizes
# Specialized cells, for convenience.
class Grid1BasicRNNCell(GridRNNCell):
"""1D BasicRNN cell"""
def __init__(self, num_units, state_is_tuple=True, output_is_tuple=True):
super(Grid1BasicRNNCell, self).__init__(
num_units=num_units,
num_dims=1,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=False,
cell_fn=lambda n: rnn.BasicRNNCell(num_units=n),
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid2BasicRNNCell(GridRNNCell):
"""2D BasicRNN cell
This creates a 2D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
state_is_tuple=True,
output_is_tuple=True):
super(Grid2BasicRNNCell, self).__init__(
num_units=num_units,
num_dims=2,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=lambda n: rnn.BasicRNNCell(num_units=n),
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid1BasicLSTMCell(GridRNNCell):
"""1D BasicLSTM cell."""
def __init__(self,
num_units,
forget_bias=1,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.BasicLSTMCell(num_units=n, forget_bias=forget_bias)
super(Grid1BasicLSTMCell, self).__init__(
num_units=num_units,
num_dims=1,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=False,
cell_fn=cell_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid2BasicLSTMCell(GridRNNCell):
"""2D BasicLSTM cell.
This creates a 2D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
forget_bias=1,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.BasicLSTMCell(num_units=n, forget_bias=forget_bias)
super(Grid2BasicLSTMCell, self).__init__(
num_units=num_units,
num_dims=2,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=cell_fn,
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid1LSTMCell(GridRNNCell):
"""1D LSTM cell.
  This is different from Grid1BasicLSTMCell because it allows specifying the
  forget bias and enabling peepholes.
"""
def __init__(self,
num_units,
use_peepholes=False,
forget_bias=1.0,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.LSTMCell(
num_units=n, forget_bias=forget_bias, use_peepholes=use_peepholes)
super(Grid1LSTMCell, self).__init__(
num_units=num_units,
num_dims=1,
input_dims=0,
output_dims=0,
priority_dims=0,
cell_fn=cell_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid2LSTMCell(GridRNNCell):
"""2D LSTM cell.
This creates a 2D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
use_peepholes=False,
forget_bias=1.0,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.LSTMCell(
num_units=n, forget_bias=forget_bias, use_peepholes=use_peepholes)
super(Grid2LSTMCell, self).__init__(
num_units=num_units,
num_dims=2,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=cell_fn,
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid3LSTMCell(GridRNNCell):
  """3D LSTM cell.
  This creates a 3D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
The second and third dimensions are LSTM.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
use_peepholes=False,
forget_bias=1.0,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.LSTMCell(
num_units=n, forget_bias=forget_bias, use_peepholes=use_peepholes)
super(Grid3LSTMCell, self).__init__(
num_units=num_units,
num_dims=3,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=cell_fn,
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid2GRUCell(GridRNNCell):
  """2D GRU cell.
This creates a 2D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
state_is_tuple=True,
output_is_tuple=True):
super(Grid2GRUCell, self).__init__(
num_units=num_units,
num_dims=2,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=lambda n: rnn.GRUCell(num_units=n),
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
# Helpers
_GridRNNDimension = namedtuple('_GridRNNDimension', [
'idx', 'is_input', 'is_output', 'is_priority', 'non_recurrent_fn'
])
_GridRNNConfig = namedtuple('_GridRNNConfig',
['num_dims', 'dims', 'inputs', 'outputs',
'recurrents', 'priority', 'non_priority', 'tied',
'num_units'])
def _parse_rnn_config(num_dims, ls_input_dims, ls_output_dims, ls_priority_dims,
ls_non_recurrent_dims, non_recurrent_fn, tied, num_units):
def check_dim_list(ls):
if ls is None:
ls = []
if not isinstance(ls, (list, tuple)):
ls = [ls]
ls = sorted(set(ls))
if any(_ < 0 or _ >= num_dims for _ in ls):
raise ValueError('Invalid dims: {}. Must be in [0, {})'.format(ls,
num_dims))
return ls
input_dims = check_dim_list(ls_input_dims)
output_dims = check_dim_list(ls_output_dims)
priority_dims = check_dim_list(ls_priority_dims)
non_recurrent_dims = check_dim_list(ls_non_recurrent_dims)
rnn_dims = []
for i in range(num_dims):
rnn_dims.append(
_GridRNNDimension(
idx=i,
is_input=(i in input_dims),
is_output=(i in output_dims),
is_priority=(i in priority_dims),
non_recurrent_fn=non_recurrent_fn
if i in non_recurrent_dims else None))
return _GridRNNConfig(
num_dims=num_dims,
dims=rnn_dims,
inputs=input_dims,
outputs=output_dims,
recurrents=[x for x in range(num_dims) if x not in non_recurrent_dims],
priority=priority_dims,
non_priority=[x for x in range(num_dims) if x not in priority_dims],
tied=tied,
num_units=num_units)
def _propagate(dim_indices, conf, cells, c_prev, m_prev, new_output, new_state,
first_call):
"""Propagates through all the cells in dim_indices dimensions.
"""
if len(dim_indices) == 0:
return
# Because of the way RNNCells are implemented, we take the last dimension
# (H_{N-1}) out and feed it as the state of the RNN cell
# (in `last_dim_output`).
  # The inputs of the cell (H_0 to H_{N-2}) are concatenated into `cell_inputs`
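  # e.g. with num_dims == 3 and all dimensions recurrent, cell_inputs is
  # concat(H_0, H_1) and last_dim_output is H_2 (or its freshly computed
  # value, once present in new_output).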
if conf.num_dims > 1:
ls_cell_inputs = [None] * (conf.num_dims - 1)
for d in conf.dims[:-1]:
if new_output[d.idx] is None:
ls_cell_inputs[d.idx] = m_prev[d.idx]
else:
ls_cell_inputs[d.idx] = new_output[d.idx]
cell_inputs = array_ops.concat(ls_cell_inputs, 1)
else:
cell_inputs = array_ops.zeros([m_prev[0].get_shape().as_list()[0], 0],
m_prev[0].dtype)
last_dim_output = (new_output[-1]
if new_output[-1] is not None else m_prev[-1])
for i in dim_indices:
d = conf.dims[i]
if d.non_recurrent_fn:
if conf.num_dims > 1:
linear_args = array_ops.concat([cell_inputs, last_dim_output], 1)
else:
linear_args = last_dim_output
with vs.variable_scope('non_recurrent' if conf.tied else
'non_recurrent/cell_{}'.format(i)):
if conf.tied and not (first_call and i == dim_indices[0]):
vs.get_variable_scope().reuse_variables()
new_output[d.idx] = layers.fully_connected(
linear_args,
num_outputs=conf.num_units,
activation_fn=d.non_recurrent_fn,
weights_initializer=(vs.get_variable_scope().initializer or
layers.initializers.xavier_initializer),
weights_regularizer=vs.get_variable_scope().regularizer)
else:
if c_prev[i] is not None:
cell_state = (c_prev[i], last_dim_output)
else:
# for GRU/RNN, the state is just the previous output
cell_state = last_dim_output
with vs.variable_scope('recurrent' if conf.tied else
'recurrent/cell_{}'.format(i)):
if conf.tied and not (first_call and i == dim_indices[0]):
vs.get_variable_scope().reuse_variables()
cell = cells[i]
new_output[d.idx], new_state[d.idx] = cell(cell_inputs, cell_state)
| apache-2.0 | 4,799,294,087,729,401,000 | 33.885714 | 80 | 0.59925 | false |
albertz/music-player | mac/pyobjc-core/PyObjCTest/test_object_property.py | 2 | 19504 | from __future__ import unicode_literals
from PyObjCTools.TestSupport import *
import objc
import copy
from PyObjCTest.fnd import *
objc.registerMetaDataForSelector(
b"NSObject", b"validateValue:forKey:error:",
dict(
arguments={
2: dict(type_modifier=objc._C_INOUT),
4: dict(type_modifier=objc._C_OUT),
},
))
class OCCopy (NSObject):
def copy(self):
return self.copyWithZone_(None)
def copyWithZone_(self, zone):
v = OCCopy.allocWithZone_(zone).init()
return v
class OCObserve (NSObject):
def init(self):
self = super(OCObserve, self).init()
self.values = []
self.registrations = []
return self
@property
def seen(self):
return { v[1]: v[2]['new'] for v in self.values }
def register(self, object, keypath):
object.addObserver_forKeyPath_options_context_(
self, keypath, 0x3, None)
self.registrations.append((object, keypath))
def unregister(self, object, keypath):
object.removeObserver_forKeyPath_(self, keypath)
def observeValueForKeyPath_ofObject_change_context_(
self, keypath, object, change, context):
# We don't get to keep the 'change' dictionary, make
# a copy (it gets reused in future calls)
new_change = {}
for k in change:
v = change[k]
if isinstance(v, (list, tuple, set)):
v = copy.copy(v)
new_change[k] = v
self.values.append((object, keypath, new_change))
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
for o, k in self.registrations:
self.unregister(o, k)
self.registrations = []
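# Usage sketch for OCObserve as a context manager (illustrative; `obj` stands
# for any NSObject subclass instance with a KVO-compliant 'p1' property):
#
#   with OCObserve.alloc().init() as observer:
#       observer.register(obj, 'p1')
#       obj.p1 = 42
#   assert observer.seen == {'p1': 42}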
class TestObjectProperty (TestCase):
def testCreation(self):
class OCTestObjectProperty1 (NSObject):
p1 = objc.object_property()
p2 = objc.object_property(copy=True)
p3 = objc.object_property(read_only=True)
p4 = objc.object_property(ivar='myp4')
p5 = objc.object_property(typestr=objc._C_INT)
p6 = objc.object_property(typestr=objc._C_DBL)
o = OCTestObjectProperty1.alloc().init()
self.assertTrue(o.respondsToSelector(b'p1'))
self.assertTrue(o.respondsToSelector(b'setP1:'))
v = OCCopy.alloc().init()
o.p1 = v
self.assertIs(o.p1, v)
self.assertIs(o._p1, v)
self.assertTrue(o.respondsToSelector(b'p2'))
self.assertTrue(o.respondsToSelector(b'setP2:'))
o.p2 = v
self.assertIsInstance(o.p2, OCCopy)
self.assertIsNot(o.p2, v)
self.assertIsNot(o._p2, v)
self.assertTrue(o.respondsToSelector(b'p3'))
self.assertFalse(o.respondsToSelector(b'setP3:'))
o._p3 = v
self.assertIs(o.p3, v)
self.assertTrue(o.respondsToSelector(b'p4'))
self.assertTrue(o.respondsToSelector(b'setP4:'))
o.p4 = v
self.assertIs(o.p4, v)
self.assertIs(o.myp4, v)
self.assertTrue(o.respondsToSelector(b'p5'))
self.assertTrue(o.respondsToSelector(b'setP5:'))
self.assertTrue(o.respondsToSelector(b'p6'))
self.assertTrue(o.respondsToSelector(b'setP6:'))
s = o.methodSignatureForSelector_(b'p5')
self.assertEqual(s.methodReturnType(), objc._C_INT)
s = o.methodSignatureForSelector_(b'p6')
self.assertEqual(s.methodReturnType(), objc._C_DBL)
def testDepends(self):
class OCTestObjectProperty2 (NSObject):
p1 = objc.object_property()
p2 = objc.object_property()
p3 = objc.object_property(read_only=True, depends_on=['p1', 'p2'])
@p3.getter
def p3(self):
return (self.p1 or '', self.p2 or '')
class OCTestObjectProperty2b (OCTestObjectProperty2):
p4 = objc.object_property()
@OCTestObjectProperty2.p3.getter
def p3(self):
return (self.p4 or '', self.p2 or '', self.p1 or '')
p3.depends_on('p4')
p5 = objc.object_property(read_only=True)
@p5.getter
def p5(self):
return "-%s-"%(self.p4,)
p5.depends_on('p4')
observer1 = OCObserve.alloc().init()
observer2 = OCObserve.alloc().init()
object1 = OCTestObjectProperty2.alloc().init()
object2 = OCTestObjectProperty2b.alloc().init()
v = type(object1).keyPathsForValuesAffectingP3()
self.assertIsInstance(v, objc.lookUpClass('NSSet'))
self.assertEqual(v, {'p1', 'p2'})
v = type(object2).keyPathsForValuesAffectingP3()
self.assertIsInstance(v, objc.lookUpClass('NSSet'))
self.assertEqual(v, {'p1', 'p2', 'p4'})
self.assertTrue(object1.respondsToSelector('p1'))
self.assertTrue(object1.respondsToSelector('setP1:'))
self.assertTrue(object1.respondsToSelector('p2'))
self.assertTrue(object1.respondsToSelector('setP2:'))
self.assertTrue(object1.respondsToSelector('p3'))
self.assertFalse(object1.respondsToSelector('setP3:'))
self.assertTrue(object2.respondsToSelector('p1'))
self.assertTrue(object2.respondsToSelector('setP1:'))
self.assertTrue(object2.respondsToSelector('p2'))
self.assertTrue(object2.respondsToSelector('setP2:'))
self.assertTrue(object2.respondsToSelector('p3'))
self.assertFalse(object2.respondsToSelector('setP3:'))
self.assertTrue(object2.respondsToSelector('p4'))
self.assertTrue(object2.respondsToSelector('setP4:'))
observer1.register(object1, 'p1')
observer1.register(object1, 'p2')
observer1.register(object1, 'p3')
observer2.register(object2, 'p1')
observer2.register(object2, 'p2')
observer2.register(object2, 'p3')
observer2.register(object2, 'p4')
observer2.register(object2, 'p5')
try:
self.assertEqual(observer1.values, [])
self.assertEqual(observer2.values, [])
object1.p1 = "a"
object1.p2 = "b"
self.assertEqual(object1.p3, ("a", "b"))
self.assertEqual(object1.pyobjc_instanceMethods.p3(), ("a", "b"))
object2.p1 = "a"
object2.p2 = "b"
object2.p4 = "c"
self.assertEqual(object2.p3, ("c", "b", "a"))
self.assertEqual(object2.pyobjc_instanceMethods.p3(), ("c", "b", "a"))
self.assertEqual(object2.pyobjc_instanceMethods.p4(), "c")
#seen = { v[1]: v[2]['new'] for v in observer1.values }
self.assertEqual(observer1.seen,
{'p1': 'a', 'p2': 'b', 'p3': ('a', 'b') })
#seen = { v[1]: v[2]['new'] for v in observer2.values }
self.assertEqual(observer2.seen,
{'p1': 'a', 'p2': 'b', 'p3': ('c', 'b', 'a'), 'p4': 'c', 'p5': '-c-' })
finally:
observer1.unregister(object1, 'p1')
observer1.unregister(object1, 'p2')
observer1.unregister(object1, 'p3')
observer2.unregister(object2, 'p1')
observer2.unregister(object2, 'p2')
observer2.unregister(object2, 'p3')
observer2.unregister(object2, 'p4')
observer2.unregister(object2, 'p5')
def testDepends2(self):
class OCTestObjectProperty2B (NSObject):
p1 = objc.object_property()
@p1.getter
def p1(self):
return self._p1
@p1.setter
def p1(self, v):
self._p1 = v
p2 = objc.object_property()
@p2.getter
def p2(self):
return self._p2
@p2.setter
def p2(self, v):
self._p2 = v
p3 = objc.object_property(read_only=True, depends_on=['p1', 'p2'])
@p3.getter
def p3(self):
return (self.p1 or '', self.p2 or '')
class OCTestObjectProperty2Bb (OCTestObjectProperty2B):
p4 = objc.object_property()
@OCTestObjectProperty2B.p1.getter
def p1(self):
return self._p1
@OCTestObjectProperty2B.p3.getter
def p3(self):
return (self.p4 or '', self.p2 or '', self.p1 or '')
p3.depends_on('p4')
observer1 = OCObserve.alloc().init()
observer2 = OCObserve.alloc().init()
object1 = OCTestObjectProperty2B.alloc().init()
object2 = OCTestObjectProperty2Bb.alloc().init()
v = type(object1).keyPathsForValuesAffectingP3()
self.assertIsInstance(v, objc.lookUpClass('NSSet'))
self.assertEqual(v, {'p1', 'p2'})
v = type(object2).keyPathsForValuesAffectingP3()
self.assertIsInstance(v, objc.lookUpClass('NSSet'))
self.assertEqual(v, {'p1', 'p2', 'p4'})
self.assertTrue(object1.respondsToSelector('p1'))
self.assertTrue(object1.respondsToSelector('setP1:'))
self.assertTrue(object1.respondsToSelector('p2'))
self.assertTrue(object1.respondsToSelector('setP2:'))
self.assertTrue(object1.respondsToSelector('p3'))
self.assertFalse(object1.respondsToSelector('setP3:'))
self.assertTrue(object2.respondsToSelector('p1'))
self.assertTrue(object2.respondsToSelector('setP1:'))
self.assertTrue(object2.respondsToSelector('p2'))
self.assertTrue(object2.respondsToSelector('setP2:'))
self.assertTrue(object2.respondsToSelector('p3'))
self.assertFalse(object2.respondsToSelector('setP3:'))
self.assertTrue(object2.respondsToSelector('p4'))
self.assertTrue(object2.respondsToSelector('setP4:'))
observer1.register(object1, 'p1')
observer1.register(object1, 'p2')
observer1.register(object1, 'p3')
observer2.register(object2, 'p1')
observer2.register(object2, 'p2')
observer2.register(object2, 'p3')
observer2.register(object2, 'p4')
try:
self.assertEqual(observer1.values, [])
self.assertEqual(observer2.values, [])
object1.p1 = "a"
object1.p2 = "b"
self.assertEqual(object1.p3, ("a", "b"))
self.assertEqual(object1.pyobjc_instanceMethods.p3(), ("a", "b"))
object2.p1 = "a"
object2.p2 = "b"
object2.p4 = "c"
self.assertEqual(object2.p3, ("c", "b", "a"))
self.assertEqual(object2.pyobjc_instanceMethods.p3(), ("c", "b", "a"))
self.assertEqual(object2.pyobjc_instanceMethods.p4(), "c")
#seen = { v[1]: v[2]['new'] for v in observer1.values }
self.assertEqual(observer1.seen,
{'p1': 'a', 'p2': 'b', 'p3': ('a', 'b') })
#seen = { v[1]: v[2]['new'] for v in observer2.values }
self.assertEqual(observer2.seen,
{'p1': 'a', 'p2': 'b', 'p3': ('c', 'b', 'a'), 'p4': 'c' })
finally:
observer1.unregister(object1, 'p1')
observer1.unregister(object1, 'p2')
observer1.unregister(object1, 'p3')
observer2.unregister(object2, 'p1')
observer2.unregister(object2, 'p2')
observer2.unregister(object2, 'p3')
observer2.unregister(object2, 'p4')
def testMethods(self):
l = []
class OCTestObjectProperty4 (NSObject):
p1 = objc.object_property()
@p1.getter
def p1(self):
l.append(('get',))
return self._p1 + '!'
@p1.setter
def p1(self, v):
l.append(('set', v))
self._p1 = v + '?'
@p1.validate
def p1(self, value, error):
if value == 1:
return (True, value, None)
else:
return (False, 2, "snake")
class OCTestObjectProperty4b (OCTestObjectProperty4):
@OCTestObjectProperty4.p1.validate
def p1(self, value, error):
if value == 2:
return (True, value, None)
else:
return (False, 2, "monty")
o = OCTestObjectProperty4.alloc().init()
o.p1 = 'f'
self.assertEqual(o.p1, 'f?!')
self.assertEqual(o._p1, 'f?')
self.assertEqual(l, [('set', 'f'), ('get',)])
ok, value, error = o.validateValue_forKey_error_(
1, 'p1', None)
self.assertTrue(ok)
self.assertEqual(value, 1)
self.assertEqual(error, None)
ok, value, error = o.validateValue_forKey_error_(
9, 'p1', None)
self.assertFalse(ok)
self.assertEqual(value, 2)
self.assertEqual(error, "snake")
l = []
o = OCTestObjectProperty4b.alloc().init()
o.p1 = 'f'
self.assertEqual(o.p1, 'f?!')
self.assertEqual(o._p1, 'f?')
self.assertEqual(l, [('set', 'f'), ('get',)])
ok, value, error = o.validateValue_forKey_error_(
2, 'p1', None)
self.assertTrue(ok)
self.assertEqual(value, 2)
self.assertEqual(error, None)
ok, value, error = o.validateValue_forKey_error_(
9, 'p1', None)
self.assertFalse(ok)
self.assertEqual(value, 2)
self.assertEqual(error, "monty")
def testNative(self):
l = []
class OCTestObjectProperty7 (NSObject):
p1 = objc.object_property()
@p1.getter
def p1(self):
l.append('get')
return self._p1
@p1.setter
def p1(self, value):
l.append('set')
self._p1 = value
o = OCTestObjectProperty7.alloc().init()
o.setValue_forKey_(42, 'p1')
self.assertEqual(o._p1, 42)
o._p1 = "monkey"
v = o.valueForKey_('p1')
self.assertEqual(v, "monkey")
self.assertEqual(l, ["set", "get"])
def testDynamic(self):
class OCTestObjectProperty8 (NSObject):
p1 = objc.object_property(dynamic=True)
p2 = objc.object_property(dynamic=True, typestr=objc._C_NSBOOL)
self.assertFalse(OCTestObjectProperty8.instancesRespondToSelector_(b"p1"))
self.assertFalse(OCTestObjectProperty8.instancesRespondToSelector_(b"setP1:"))
self.assertFalse(OCTestObjectProperty8.instancesRespondToSelector_(b"isP2"))
self.assertFalse(OCTestObjectProperty8.instancesRespondToSelector_(b"setP2:"))
v = [42]
def getter(self):
return v[0]
def setter(self, value):
v[0] = value
OCTestObjectProperty8.p1 = getter
OCTestObjectProperty8.setP1_ = setter
v2 = [False]
def getter2(self):
return v2[0]
def setter2(self, value):
v2[0] = bool(value)
OCTestObjectProperty8.isP2 = getter2
OCTestObjectProperty8.setP2_ = setter2
self.assertTrue(OCTestObjectProperty8.instancesRespondToSelector_(b"p1"))
self.assertTrue(OCTestObjectProperty8.instancesRespondToSelector_(b"setP1:"))
self.assertTrue(OCTestObjectProperty8.instancesRespondToSelector_(b"isP2"))
self.assertTrue(OCTestObjectProperty8.instancesRespondToSelector_(b"setP2:"))
o = OCTestObjectProperty8.alloc().init()
self.assertIsInstance(OCTestObjectProperty8.p1, objc.object_property)
self.assertIsInstance(OCTestObjectProperty8.p2, objc.object_property)
self.assertEqual(o.p1, 42)
self.assertEqual(o.p2, False)
o.p1 = 99
o.p2 = True
self.assertEqual(o.p1, 99)
self.assertEqual(v[0], 99)
self.assertEqual(o.p2, True)
self.assertEqual(v2[0], True)
def testReadOnly(self):
class OCTestObjectProperty3 (NSObject):
p1 = objc.object_property(read_only=True)
o = OCTestObjectProperty3.alloc().init()
self.assertRaises(ValueError, setattr, o, 'p1', 42)
def testSubclass(self):
class OCTestObjectProperty5 (NSObject):
p1 = objc.object_property(read_only=True)
p2 = objc.object_property()
p3 = objc.object_property(read_only=True, typestr=objc._C_NSBOOL)
class OCTestObjectProperty6 (OCTestObjectProperty5):
@OCTestObjectProperty5.p1.setter
def p1(self, value):
self._p1 = value
@OCTestObjectProperty5.p2.setter
def p2(self, value):
self._p2 = value * 2
@OCTestObjectProperty5.p3.getter
def p3(self):
return not super(OCTestObjectProperty6, self).p3
base = OCTestObjectProperty5.alloc().init()
self.assertRaises(ValueError, setattr, base, 'p1', 1)
self.assertRaises(ValueError, setattr, base, 'p3', 1)
base.p2 = 'b'
self.assertEqual(base.p2, 'b')
sub = OCTestObjectProperty6.alloc().init()
sub.p1 = 1
sub.p2 = 'a'
sub._p3 = False
self.assertEqual(sub.p1, 1)
self.assertEqual(sub.p2, 'aa')
self.assertEqual(sub.p3, True)
self.assertTrue(base.respondsToSelector_(b'p2'))
self.assertFalse(base.respondsToSelector_(b'setP1:'))
self.assertTrue(base.respondsToSelector_(b'isP3'))
self.assertFalse(base.respondsToSelector_(b'p3'))
self.assertTrue(sub.respondsToSelector_(b'p2'))
self.assertTrue(sub.respondsToSelector_(b'setP1:'))
self.assertTrue(sub.respondsToSelector_(b'isP3'))
self.assertFalse(sub.respondsToSelector_(b'p3'))
try:
del sub.p3
except TypeError:
pass
else:
self.fail("Deleting an object_property shouldn't be possible")
def testDefaultSetterWithoutIvar(self):
try:
class OCTestObjectProperty7 (NSObject):
p1 = objc.object_property(ivar=objc.NULL)
except ValueError:
pass
else:
self.fail("ValueError not raised")
try:
class OCTestObjectProperty8 (NSObject):
p1 = objc.object_property(ivar=objc.NULL, read_only=True)
except ValueError:
pass
else:
self.fail("ValueError not raised")
try:
class OCTestObjectProperty9 (NSObject):
p1 = objc.object_property(read_only=True)
@p1.setter
def p1(self, v):
pass
except ValueError:
pass
else:
self.fail("ValueError not raised")
try:
class OCTestObjectProperty9 (NSObject):
p1 = objc.object_property(read_only=True)
@p1.validate
def p1(self, v):
pass
except ValueError:
pass
else:
self.fail("ValueError not raised")
class TestBoolProperty (TestCase):
def testDefault(self):
class OCTestBoolProperty1 (NSObject):
p1 = objc.bool_property()
o = OCTestBoolProperty1.alloc().init()
self.assertEqual(o.p1, False)
o.p1 = [1, 2]
self.assertEqual(o.p1, True)
if __name__ == "__main__":
main()
| bsd-2-clause | 6,946,988,906,597,692,000 | 31.779832 | 87 | 0.566499 | false |
flwh/KK_mt6589_iq451 | prebuilts/python/linux-x86/2.7.5/lib/python2.7/distutils/tests/test_cmd.py | 87 | 3901 | """Tests for distutils.cmd."""
import unittest
import os
from test.test_support import captured_stdout, run_unittest
from distutils.cmd import Command
from distutils.dist import Distribution
from distutils.errors import DistutilsOptionError
from distutils import debug
class MyCmd(Command):
def initialize_options(self):
pass
class CommandTestCase(unittest.TestCase):
def setUp(self):
dist = Distribution()
self.cmd = MyCmd(dist)
def test_ensure_string_list(self):
cmd = self.cmd
cmd.not_string_list = ['one', 2, 'three']
cmd.yes_string_list = ['one', 'two', 'three']
cmd.not_string_list2 = object()
cmd.yes_string_list2 = 'ok'
cmd.ensure_string_list('yes_string_list')
cmd.ensure_string_list('yes_string_list2')
self.assertRaises(DistutilsOptionError,
cmd.ensure_string_list, 'not_string_list')
self.assertRaises(DistutilsOptionError,
cmd.ensure_string_list, 'not_string_list2')
def test_make_file(self):
cmd = self.cmd
# making sure it raises when infiles is not a string or a list/tuple
self.assertRaises(TypeError, cmd.make_file,
infiles=1, outfile='', func='func', args=())
# making sure execute gets called properly
def _execute(func, args, exec_msg, level):
self.assertEqual(exec_msg, 'generating out from in')
cmd.force = True
cmd.execute = _execute
cmd.make_file(infiles='in', outfile='out', func='func', args=())
def test_dump_options(self):
msgs = []
def _announce(msg, level):
msgs.append(msg)
cmd = self.cmd
cmd.announce = _announce
cmd.option1 = 1
cmd.option2 = 1
cmd.user_options = [('option1', '', ''), ('option2', '', '')]
cmd.dump_options()
wanted = ["command options for 'MyCmd':", ' option1 = 1',
' option2 = 1']
self.assertEqual(msgs, wanted)
def test_ensure_string(self):
cmd = self.cmd
cmd.option1 = 'ok'
cmd.ensure_string('option1')
cmd.option2 = None
cmd.ensure_string('option2', 'xxx')
self.assertTrue(hasattr(cmd, 'option2'))
cmd.option3 = 1
self.assertRaises(DistutilsOptionError, cmd.ensure_string, 'option3')
    def test_ensure_string_list_conversion(self):
cmd = self.cmd
cmd.option1 = 'ok,dok'
cmd.ensure_string_list('option1')
self.assertEqual(cmd.option1, ['ok', 'dok'])
cmd.option2 = ['xxx', 'www']
cmd.ensure_string_list('option2')
cmd.option3 = ['ok', 2]
self.assertRaises(DistutilsOptionError, cmd.ensure_string_list,
'option3')
def test_ensure_filename(self):
cmd = self.cmd
cmd.option1 = __file__
cmd.ensure_filename('option1')
cmd.option2 = 'xxx'
self.assertRaises(DistutilsOptionError, cmd.ensure_filename, 'option2')
def test_ensure_dirname(self):
cmd = self.cmd
cmd.option1 = os.path.dirname(__file__) or os.curdir
cmd.ensure_dirname('option1')
cmd.option2 = 'xxx'
self.assertRaises(DistutilsOptionError, cmd.ensure_dirname, 'option2')
def test_debug_print(self):
cmd = self.cmd
with captured_stdout() as stdout:
cmd.debug_print('xxx')
stdout.seek(0)
self.assertEqual(stdout.read(), '')
debug.DEBUG = True
try:
with captured_stdout() as stdout:
cmd.debug_print('xxx')
stdout.seek(0)
self.assertEqual(stdout.read(), 'xxx\n')
finally:
debug.DEBUG = False
def test_suite():
return unittest.makeSuite(CommandTestCase)
if __name__ == '__main__':
run_unittest(test_suite())
| gpl-2.0 | 7,578,320,308,979,014,000 | 29.716535 | 79 | 0.583953 | false |
tombstone/models | research/slim/nets/mobilenet/mobilenet_v3_test.py | 3 | 5913 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for google3.third_party.tensorflow_models.slim.nets.mobilenet.mobilenet_v3."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
from nets.mobilenet import mobilenet_v3
from absl.testing import parameterized
class MobilenetV3Test(tf.test.TestCase, parameterized.TestCase):
# pylint: disable = g-unreachable-test-method
def assertVariablesHaveNormalizerFn(self, use_groupnorm):
global_variables = [v.name for v in tf.global_variables()]
has_batch_norm = False
has_group_norm = False
for global_variable in global_variables:
if 'BatchNorm' in global_variable:
has_batch_norm = True
if 'GroupNorm' in global_variable:
has_group_norm = True
if use_groupnorm:
self.assertFalse(has_batch_norm)
self.assertTrue(has_group_norm)
else:
self.assertTrue(has_batch_norm)
self.assertFalse(has_group_norm)
@parameterized.named_parameters(('without_groupnorm', False),
('with_groupnorm', True))
def testMobilenetV3Large(self, use_groupnorm):
logits, endpoints = mobilenet_v3.mobilenet(
tf.placeholder(tf.float32, (1, 224, 224, 3)),
use_groupnorm=use_groupnorm)
self.assertEqual(endpoints['layer_19'].shape, [1, 1, 1, 1280])
self.assertEqual(logits.shape, [1, 1001])
self.assertVariablesHaveNormalizerFn(use_groupnorm)
@parameterized.named_parameters(('without_groupnorm', False),
('with_groupnorm', True))
def testMobilenetV3Small(self, use_groupnorm):
_, endpoints = mobilenet_v3.mobilenet(
tf.placeholder(tf.float32, (1, 224, 224, 3)),
conv_defs=mobilenet_v3.V3_SMALL,
use_groupnorm=use_groupnorm)
self.assertEqual(endpoints['layer_15'].shape, [1, 1, 1, 1024])
self.assertVariablesHaveNormalizerFn(use_groupnorm)
@parameterized.named_parameters(('without_groupnorm', False),
('with_groupnorm', True))
def testMobilenetEdgeTpu(self, use_groupnorm):
_, endpoints = mobilenet_v3.edge_tpu(
tf.placeholder(tf.float32, (1, 224, 224, 3)),
use_groupnorm=use_groupnorm)
self.assertIn('Inference mode is created by default',
mobilenet_v3.edge_tpu.__doc__)
self.assertEqual(endpoints['layer_24'].shape, [1, 7, 7, 1280])
self.assertStartsWith(
endpoints['layer_24'].name, 'MobilenetEdgeTPU')
self.assertVariablesHaveNormalizerFn(use_groupnorm)
def testMobilenetEdgeTpuChangeScope(self):
_, endpoints = mobilenet_v3.edge_tpu(
tf.placeholder(tf.float32, (1, 224, 224, 3)), scope='Scope')
self.assertStartsWith(
endpoints['layer_24'].name, 'Scope')
@parameterized.named_parameters(('without_groupnorm', False),
('with_groupnorm', True))
def testMobilenetV3BaseOnly(self, use_groupnorm):
result, endpoints = mobilenet_v3.mobilenet(
tf.placeholder(tf.float32, (1, 224, 224, 3)),
conv_defs=mobilenet_v3.V3_LARGE,
use_groupnorm=use_groupnorm,
base_only=True,
final_endpoint='layer_17')
# Get the latest layer before average pool.
self.assertEqual(endpoints['layer_17'].shape, [1, 7, 7, 960])
self.assertEqual(result, endpoints['layer_17'])
self.assertVariablesHaveNormalizerFn(use_groupnorm)
def testMobilenetV3BaseOnly_VariableInput(self):
result, endpoints = mobilenet_v3.mobilenet(
tf.placeholder(tf.float32, (None, None, None, 3)),
conv_defs=mobilenet_v3.V3_LARGE,
base_only=True,
final_endpoint='layer_17')
# Get the latest layer before average pool.
self.assertEqual(endpoints['layer_17'].shape.as_list(),
[None, None, None, 960])
self.assertEqual(result, endpoints['layer_17'])
# Use reduce mean for pooling and check for operation 'ReduceMean' in graph
@parameterized.named_parameters(('without_groupnorm', False),
('with_groupnorm', True))
def testMobilenetV3WithReduceMean(self, use_groupnorm):
_, _ = mobilenet_v3.mobilenet(
tf.placeholder(tf.float32, (1, 224, 224, 3)),
conv_defs=mobilenet_v3.V3_SMALL,
use_groupnorm=use_groupnorm,
use_reduce_mean_for_pooling=True)
g = tf.get_default_graph()
reduce_mean = [v for v in g.get_operations() if 'ReduceMean' in v.name]
self.assertNotEmpty(reduce_mean)
self.assertVariablesHaveNormalizerFn(use_groupnorm)
@parameterized.named_parameters(('without_groupnorm', False),
('with_groupnorm', True))
def testMobilenetV3WithOutReduceMean(self, use_groupnorm):
_, _ = mobilenet_v3.mobilenet(
tf.placeholder(tf.float32, (1, 224, 224, 3)),
conv_defs=mobilenet_v3.V3_SMALL,
use_groupnorm=use_groupnorm,
use_reduce_mean_for_pooling=False)
g = tf.get_default_graph()
reduce_mean = [v for v in g.get_operations() if 'ReduceMean' in v.name]
self.assertEmpty(reduce_mean)
self.assertVariablesHaveNormalizerFn(use_groupnorm)
if __name__ == '__main__':
# absltest.main()
tf.test.main()
| apache-2.0 | -6,931,470,342,817,953,000 | 41.235714 | 87 | 0.663623 | false |
pv/scikit-learn | sklearn/tree/tests/test_export.py | 76 | 9318 | """
Testing for export functions of decision trees (sklearn.tree.export).
"""
from numpy.testing import assert_equal
from nose.tools import assert_raises
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.tree import export_graphviz
from sklearn.externals.six import StringIO
# toy sample
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y = [-1, -1, -1, 1, 1, 1]
y2 = [[-1, 1], [-1, 2], [-1, 3], [1, 1], [1, 2], [1, 3]]
w = [1, 1, 1, .5, .5, .5]
def test_graphviz_toy():
# Check correctness of export_graphviz
clf = DecisionTreeClassifier(max_depth=3,
min_samples_split=1,
criterion="gini",
random_state=2)
clf.fit(X, y)
# Test export code
out = StringIO()
export_graphviz(clf, out_file=out)
contents1 = out.getvalue()
contents2 = 'digraph Tree {\n' \
'node [shape=box] ;\n' \
'0 [label="X[0] <= 0.0\\ngini = 0.5\\nsamples = 6\\n' \
'value = [3, 3]"] ;\n' \
'1 [label="gini = 0.0\\nsamples = 3\\nvalue = [3, 0]"] ;\n' \
'0 -> 1 [labeldistance=2.5, labelangle=45, ' \
'headlabel="True"] ;\n' \
'2 [label="gini = 0.0\\nsamples = 3\\nvalue = [0, 3]"] ;\n' \
'0 -> 2 [labeldistance=2.5, labelangle=-45, ' \
'headlabel="False"] ;\n' \
'}'
assert_equal(contents1, contents2)
# Test with feature_names
out = StringIO()
export_graphviz(clf, out_file=out, feature_names=["feature0", "feature1"])
contents1 = out.getvalue()
contents2 = 'digraph Tree {\n' \
'node [shape=box] ;\n' \
'0 [label="feature0 <= 0.0\\ngini = 0.5\\nsamples = 6\\n' \
'value = [3, 3]"] ;\n' \
'1 [label="gini = 0.0\\nsamples = 3\\nvalue = [3, 0]"] ;\n' \
'0 -> 1 [labeldistance=2.5, labelangle=45, ' \
'headlabel="True"] ;\n' \
'2 [label="gini = 0.0\\nsamples = 3\\nvalue = [0, 3]"] ;\n' \
'0 -> 2 [labeldistance=2.5, labelangle=-45, ' \
'headlabel="False"] ;\n' \
'}'
assert_equal(contents1, contents2)
# Test with class_names
out = StringIO()
export_graphviz(clf, out_file=out, class_names=["yes", "no"])
contents1 = out.getvalue()
contents2 = 'digraph Tree {\n' \
'node [shape=box] ;\n' \
'0 [label="X[0] <= 0.0\\ngini = 0.5\\nsamples = 6\\n' \
'value = [3, 3]\\nclass = yes"] ;\n' \
'1 [label="gini = 0.0\\nsamples = 3\\nvalue = [3, 0]\\n' \
'class = yes"] ;\n' \
'0 -> 1 [labeldistance=2.5, labelangle=45, ' \
'headlabel="True"] ;\n' \
'2 [label="gini = 0.0\\nsamples = 3\\nvalue = [0, 3]\\n' \
'class = no"] ;\n' \
'0 -> 2 [labeldistance=2.5, labelangle=-45, ' \
'headlabel="False"] ;\n' \
'}'
assert_equal(contents1, contents2)
# Test plot_options
out = StringIO()
export_graphviz(clf, out_file=out, filled=True, impurity=False,
proportion=True, special_characters=True, rounded=True)
contents1 = out.getvalue()
contents2 = 'digraph Tree {\n' \
'node [shape=box, style="filled, rounded", color="black", ' \
'fontname=helvetica] ;\n' \
'edge [fontname=helvetica] ;\n' \
'0 [label=<X<SUB>0</SUB> ≤ 0.0<br/>samples = 100.0%<br/>' \
'value = [0.5, 0.5]>, fillcolor="#e5813900"] ;\n' \
'1 [label=<samples = 50.0%<br/>value = [1.0, 0.0]>, ' \
'fillcolor="#e58139ff"] ;\n' \
'0 -> 1 [labeldistance=2.5, labelangle=45, ' \
'headlabel="True"] ;\n' \
'2 [label=<samples = 50.0%<br/>value = [0.0, 1.0]>, ' \
'fillcolor="#399de5ff"] ;\n' \
'0 -> 2 [labeldistance=2.5, labelangle=-45, ' \
'headlabel="False"] ;\n' \
'}'
assert_equal(contents1, contents2)
# Test max_depth
out = StringIO()
export_graphviz(clf, out_file=out, max_depth=0, class_names=True)
contents1 = out.getvalue()
contents2 = 'digraph Tree {\n' \
'node [shape=box] ;\n' \
'0 [label="X[0] <= 0.0\\ngini = 0.5\\nsamples = 6\\n' \
'value = [3, 3]\\nclass = y[0]"] ;\n' \
'1 [label="(...)"] ;\n' \
'0 -> 1 ;\n' \
'2 [label="(...)"] ;\n' \
'0 -> 2 ;\n' \
'}'
assert_equal(contents1, contents2)
# Test max_depth with plot_options
out = StringIO()
export_graphviz(clf, out_file=out, max_depth=0, filled=True,
node_ids=True)
contents1 = out.getvalue()
contents2 = 'digraph Tree {\n' \
'node [shape=box, style="filled", color="black"] ;\n' \
'0 [label="node #0\\nX[0] <= 0.0\\ngini = 0.5\\n' \
'samples = 6\\nvalue = [3, 3]", fillcolor="#e5813900"] ;\n' \
'1 [label="(...)", fillcolor="#C0C0C0"] ;\n' \
'0 -> 1 ;\n' \
'2 [label="(...)", fillcolor="#C0C0C0"] ;\n' \
'0 -> 2 ;\n' \
'}'
assert_equal(contents1, contents2)
# Test multi-output with weighted samples
clf = DecisionTreeClassifier(max_depth=2,
min_samples_split=1,
criterion="gini",
random_state=2)
clf = clf.fit(X, y2, sample_weight=w)
out = StringIO()
export_graphviz(clf, out_file=out, filled=True, impurity=False)
contents1 = out.getvalue()
contents2 = 'digraph Tree {\n' \
'node [shape=box, style="filled", color="black"] ;\n' \
'0 [label="X[0] <= 0.0\\nsamples = 6\\n' \
'value = [[3.0, 1.5, 0.0]\\n' \
'[1.5, 1.5, 1.5]]", fillcolor="#e5813900"] ;\n' \
'1 [label="X[1] <= -1.5\\nsamples = 3\\n' \
'value = [[3, 0, 0]\\n[1, 1, 1]]", ' \
'fillcolor="#e5813965"] ;\n' \
'0 -> 1 [labeldistance=2.5, labelangle=45, ' \
'headlabel="True"] ;\n' \
'2 [label="samples = 1\\nvalue = [[1, 0, 0]\\n' \
'[0, 0, 1]]", fillcolor="#e58139ff"] ;\n' \
'1 -> 2 ;\n' \
'3 [label="samples = 2\\nvalue = [[2, 0, 0]\\n' \
'[1, 1, 0]]", fillcolor="#e581398c"] ;\n' \
'1 -> 3 ;\n' \
'4 [label="X[0] <= 1.5\\nsamples = 3\\n' \
'value = [[0.0, 1.5, 0.0]\\n[0.5, 0.5, 0.5]]", ' \
'fillcolor="#e5813965"] ;\n' \
'0 -> 4 [labeldistance=2.5, labelangle=-45, ' \
'headlabel="False"] ;\n' \
'5 [label="samples = 2\\nvalue = [[0.0, 1.0, 0.0]\\n' \
'[0.5, 0.5, 0.0]]", fillcolor="#e581398c"] ;\n' \
'4 -> 5 ;\n' \
'6 [label="samples = 1\\nvalue = [[0.0, 0.5, 0.0]\\n' \
'[0.0, 0.0, 0.5]]", fillcolor="#e58139ff"] ;\n' \
'4 -> 6 ;\n' \
'}'
assert_equal(contents1, contents2)
# Test regression output with plot_options
clf = DecisionTreeRegressor(max_depth=3,
min_samples_split=1,
criterion="mse",
random_state=2)
clf.fit(X, y)
out = StringIO()
export_graphviz(clf, out_file=out, filled=True, leaves_parallel=True,
rotate=True, rounded=True)
contents1 = out.getvalue()
contents2 = 'digraph Tree {\n' \
'node [shape=box, style="filled, rounded", color="black", ' \
'fontname=helvetica] ;\n' \
'graph [ranksep=equally, splines=polyline] ;\n' \
'edge [fontname=helvetica] ;\n' \
'rankdir=LR ;\n' \
'0 [label="X[0] <= 0.0\\nmse = 1.0\\nsamples = 6\\n' \
'value = 0.0", fillcolor="#e581397f"] ;\n' \
'1 [label="mse = 0.0\\nsamples = 3\\nvalue = -1.0", ' \
'fillcolor="#e5813900"] ;\n' \
'0 -> 1 [labeldistance=2.5, labelangle=-45, ' \
'headlabel="True"] ;\n' \
'2 [label="mse = 0.0\\nsamples = 3\\nvalue = 1.0", ' \
'fillcolor="#e58139ff"] ;\n' \
'0 -> 2 [labeldistance=2.5, labelangle=45, ' \
'headlabel="False"] ;\n' \
'{rank=same ; 0} ;\n' \
'{rank=same ; 1; 2} ;\n' \
'}'
assert_equal(contents1, contents2)
def test_graphviz_errors():
# Check for errors of export_graphviz
clf = DecisionTreeClassifier(max_depth=3, min_samples_split=1)
clf.fit(X, y)
# Check feature_names error
out = StringIO()
assert_raises(IndexError, export_graphviz, clf, out, feature_names=[])
# Check class_names error
out = StringIO()
assert_raises(IndexError, export_graphviz, clf, out, class_names=[])
| bsd-3-clause | 142,422,423,081,847,460 | 40.413333 | 78 | 0.449775 | false |
igel-kun/pyload | module/plugins/hoster/XDCC.py | 1 | 3550 | # -*- coding: utf-8 -*-
import os
import re
from module.plugins.internal.Hoster import Hoster
from module.network.XDCCRequest import XDCCRequest
from module.plugins.internal.misc import parse_name, safejoin
class XDCC(Hoster):
__name__ = "XDCC"
__type__ = "hoster"
__version__ = "99"
__status__ = "testing"
__pattern__ = r'(?:xdcc|irc)://(?P<SERVER>.*?)/#?(?P<CHAN>.*?)/(?P<BOT>.*?)/#?(?P<PACK>\d+)/?'
# mimic IRSSI v0.8.6 by default
__config__ = [("nick", "str", "Nickname", "pyload" ),
("passowrd", "str", "Password for the nickname", "" ),
("realname", "str", "Realname", "really pyload" ),
("ctcp_version", "str", "Version string to send on CTCP VERSION requests", "irssi v0.8.6 - running on FreeBSD i686"),
("passive_port", "str", "Local port to open for passive DCC - 0 for auto select, X-Y for a range", 0)]
__description__ = """Download from IRC XDCC bot"""
__license__ = "GPLv3"
__authors__ = [("jeix", "[email protected]" ),
("GammaC0de", "nitzo2001[AT]yahoo[DOT]com"),
("igel", "")]
# NETWORK rules are commands to send to the server on connection, depending on the server name
NETWORK_RULES = [(r'abjects', ['JOIN #mg-chat']), (r'abandoned-irc', ['JOIN #zw-chat'])]
# PRIVMSG rules are rules to turn private messages from anyone whose name matches rule[0] into commands using re.sub(rule[1], rule[2])
PRIVMSG_RULES = [(r"(?:@staff|Zombies)", r".*you must /?join .*?(#[^ ]*) .*to download.*", r"JOIN \1")]
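    # Illustrative example (hypothetical notice text): a private message from
    # a sender matching r"(?:@staff|Zombies)" saying "you must join #files to
    # download that" is rewritten via re.sub(rule[1], rule[2]) into the IRC
    # command "JOIN #files".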
# ERROR patterns are patterns that, when received as a private notice, cause the download to fail
ERROR_PATTERN = r"(invalid pack|try again)"
def setup(self):
# TODO: find a way to do multiDL for different servers
self.multiDL = False
def parse_server(self, server):
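        # Split an optional ":port" suffix off the server string, e.g.
        # "irc.example.net:6697" -> ("irc.example.net", 6697); without a
        # port, fall back to 6667 (hostname is illustrative).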
temp = server.split(':')
server = temp[0]
if len(temp) == 2:
try:
port = int(temp[1])
except ValueError:
self.fail(_("Error: Erroneous port: %s." % temp[1]))
return (server, port)
elif len(temp) == 1:
return (server, 6667)
else:
self.fail(_("Invalid hostname for IRC Server: %s") % server)
def setup_base(self):
        # check for duplicates before get_info() overwrites the pyfile.name set on a previous attempt with the raw URL
self.check_duplicates()
def process(self, pyfile):
dl_basename = parse_name(pyfile.name)
dl_folder = self.pyload.config.get('general', 'download_folder')
dl_dirname = safejoin(dl_folder, pyfile.package().folder)
dl_filename = safejoin(dl_dirname, dl_basename)
try:
server, chan, bot, pack = re.match(self.__pattern__, pyfile.url).groups()
nick = self.config.get('nick')
password = self.config.get('password')
realname = self.config.get('realname')
# split the port from the server
server,port = self.parse_server(server)
except Exception:
self.fail(_("malformed XDCC URI: %s - expected xdcc://server[:port]/chan/bot/pack" % pyfile.url))
self.req = XDCCRequest(self, pyfile)
self.req.download(server, port, chan, bot, pack, dl_filename, True, nick, password, realname)
| gpl-3.0 | 5,521,874,226,707,542,000 | 42.82716 | 138 | 0.561127 | false |
nyuwireless/ns3-mmwave | .waf-1.8.12-f00e5b53f6bbeab1384a38c9cc5d51f7/waflib/Tools/dmd.py | 21 | 1487 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
import sys
from waflib.Tools import ar,d
from waflib.Configure import conf
@conf
def find_dmd(conf):
conf.find_program(['dmd','dmd2','ldc'],var='D')
out=conf.cmd_and_log(conf.env.D+['--help'])
if out.find("D Compiler v")==-1:
out=conf.cmd_and_log(conf.env.D+['-version'])
if out.find("based on DMD v1.")==-1:
conf.fatal("detected compiler is not dmd/ldc")
@conf
def common_flags_ldc(conf):
v=conf.env
v['DFLAGS']=['-d-version=Posix']
v['LINKFLAGS']=[]
v['DFLAGS_dshlib']=['-relocation-model=pic']
@conf
def common_flags_dmd(conf):
v=conf.env
v['D_SRC_F']=['-c']
v['D_TGT_F']='-of%s'
v['D_LINKER']=v['D']
v['DLNK_SRC_F']=''
v['DLNK_TGT_F']='-of%s'
v['DINC_ST']='-I%s'
v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s'
v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s'
v['LINKFLAGS_dprogram']=['-quiet']
v['DFLAGS_dshlib']=['-fPIC']
v['LINKFLAGS_dshlib']=['-L-shared']
v['DHEADER_ext']='.di'
v.DFLAGS_d_with_header=['-H','-Hf']
v['D_HDR_F']='%s'
def configure(conf):
conf.find_dmd()
if sys.platform=='win32':
out=conf.cmd_and_log(conf.env.D+['--help'])
if out.find("D Compiler v2.")>-1:
conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
conf.load('ar')
conf.load('d')
conf.common_flags_dmd()
conf.d_platform_flags()
if str(conf.env.D).find('ldc')>-1:
conf.common_flags_ldc()
| gpl-2.0 | 6,021,294,095,337,380,000 | 28.156863 | 78 | 0.628783 | false |
OrlandoSoto/retirement | retirement_api/urls.py | 1 | 1116 | from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns('',
# url(r'^retirement-api/admin/', include(admin.site.urls)),
url(r'^retirement-api/estimator/$', 'retirement_api.views.estimator', name='estimator'),
url(r'^retirement-api/estimator/(?P<dob>[^/]+)/(?P<income>\d+)/$', 'retirement_api.views.estimator', name='estimator'),
url(r'^retirement/retirement-api/estimator/(?P<dob>[^/]+)/(?P<income>\d+)/$', 'retirement_api.views.estimator', name='estimator'),
url(r'^retirement-api/get-retirement-age/(?P<birth_year>\d+)/$', 'retirement_api.views.get_full_retirement_age', name='get_full_retirement_age'),
url(r'^claiming-social-security/$', 'retirement_api.views.claiming', name='claiming'),
url(r'^claiming-social-security/es/$', 'retirement_api.views.claiming', {'es': True}),
url(r'^retirement/static\/(?P<path>.*)/$', 'django.contrib.staticfiles.views.serve')
)
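# e.g. a GET to /retirement-api/estimator/1970-01-15/40000/ resolves to the
# 'estimator' view above with dob='1970-01-15' and income='40000'
# (illustrative values).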
urlpatterns += staticfiles_urlpatterns()
| cc0-1.0 | 5,216,972,956,740,240,000 | 57.736842 | 149 | 0.707885 | false |
s20121035/rk3288_android5.1_repo | frameworks/base/tools/layoutlib/rename_font/build_font_single.py | 1 | 6688 | #!/usr/bin/env python
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Rename the PS name of the input font.
OpenType fonts (*.otf) are not currently supported. They are copied to the destination without renaming.
XML files are also copied in case they are passed there by mistake.
Usage: build_font_single.py /path/to/input_font.ttf /path/to/output_font.ttf
"""
import glob
import os
import re
import shutil
import sys
import xml.etree.ElementTree as etree
# Prevent .pyc files from being created.
sys.dont_write_bytecode = True
# fontTools is available at platform/external/fonttools
from fontTools import ttx
class FontInfo(object):
family = None
style = None
version = None
ends_in_regular = False
fullname = None
class InvalidFontException(Exception):
pass
# A constant to copy the font without modifying. This is useful when running
# locally and speed up the time to build the SDK.
COPY_ONLY = False
# These constants represent the value of nameID parameter in the namerecord for
# different information.
# see http://scripts.sil.org/cms/scripts/page.php?item_id=IWS-Chapter08#3054f18b
NAMEID_FAMILY = 1
NAMEID_STYLE = 2
NAMEID_FULLNAME = 4
NAMEID_VERSION = 5
# A list of extensions to process.
EXTENSIONS = ['.ttf', '.otf', '.xml']
def main(argv):
if len(argv) < 2:
print 'Incorrect usage: ' + str(argv)
    sys.exit('Usage: build_font_single.py /path/to/input_font.ttf /path/to/output_font.ttf')
dest_path = argv[-1]
input_path = argv[0]
extension = os.path.splitext(input_path)[1].lower()
if extension in EXTENSIONS:
if not COPY_ONLY and extension == '.ttf':
convert_font(input_path, dest_path)
return
shutil.copy(input_path, dest_path)
def convert_font(input_path, dest_path):
filename = os.path.basename(input_path)
print 'Converting font: ' + filename
  # the path to the temporary output file: dest_path with its trailing 'f'
  # replaced by 'x', i.e. fontfilename.ttx
ttx_path = dest_path[:-1] + 'x'
try:
# run ttx to generate an xml file in the output folder which represents all
# its info
ttx_args = ['-q', '-o', ttx_path, input_path]
ttx.main(ttx_args)
# now parse the xml file to change its PS name.
tree = etree.parse(ttx_path)
root = tree.getroot()
for name in root.iter('name'):
update_tag(name, get_font_info(name))
tree.write(ttx_path, xml_declaration=True, encoding='utf-8')
    # generate the updated font now.
ttx_args = ['-q', '-o', dest_path, ttx_path]
ttx.main(ttx_args)
except InvalidFontException:
# In case of invalid fonts, we exit.
print filename + ' is not a valid font'
raise
except Exception as e:
print 'Error converting font: ' + filename
print e
# Some fonts are too big to be handled by the ttx library.
    # Just copy them over unchanged.
shutil.copy(input_path, dest_path)
try:
    # delete the temp ttx file if it exists.
os.remove(ttx_path)
except OSError:
pass
def get_font_info(tag):
""" Returns a list of FontInfo representing the various sets of namerecords
found in the name table of the font. """
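  # Illustrative sketch only (the element/attribute names below match the ttx
  # dump parsed here; the values are hypothetical): namerecords arrive sorted
  # by nameID, and a new FontInfo group starts whenever the nameID sequence
  # resets, e.g.
  #   <namerecord nameID="1">Roboto</namerecord>          -> font.family
  #   <namerecord nameID="2">Bold</namerecord>            -> font.style
  #   <namerecord nameID="4">Roboto Bold</namerecord>     -> font.fullname
  #   <namerecord nameID="5">Version 1.00</namerecord>    -> font.version (via get_version)
  #   <namerecord nameID="1">...</namerecord>             -> nameID reset: new FontInfo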
fonts = []
font = None
last_name_id = sys.maxint
for namerecord in tag.iter('namerecord'):
if 'nameID' in namerecord.attrib:
name_id = int(namerecord.attrib['nameID'])
# A new font should be created for each platform, encoding and language
# id. But, since the nameIDs are sorted, we use the easy approach of
# creating a new one when the nameIDs reset.
if name_id <= last_name_id and font is not None:
fonts.append(font)
font = None
last_name_id = name_id
if font is None:
font = FontInfo()
if name_id == NAMEID_FAMILY:
font.family = namerecord.text.strip()
if name_id == NAMEID_STYLE:
font.style = namerecord.text.strip()
if name_id == NAMEID_FULLNAME:
font.ends_in_regular = ends_in_regular(namerecord.text)
font.fullname = namerecord.text.strip()
if name_id == NAMEID_VERSION:
font.version = get_version(namerecord.text)
if font is not None:
fonts.append(font)
return fonts
def update_tag(tag, fonts):
last_name_id = sys.maxint
fonts_iterator = fonts.__iter__()
font = None
for namerecord in tag.iter('namerecord'):
if 'nameID' in namerecord.attrib:
name_id = int(namerecord.attrib['nameID'])
if name_id <= last_name_id:
font = fonts_iterator.next()
font = update_font_name(font)
last_name_id = name_id
if name_id == NAMEID_FAMILY:
namerecord.text = font.family
if name_id == NAMEID_FULLNAME:
namerecord.text = font.fullname
def update_font_name(font):
""" Compute the new font family name and font fullname. If the font has a
valid version, it's sanitized and appended to the font family name. The
font fullname is then created by joining the new family name and the
style. If the style is 'Regular', it is appended only if the original font
had it. """
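  # Worked example with hypothetical values: family='Roboto', style='Bold'
  # and version='100' (get_version has already sanitized 'Version 1.00')
  # yield new_family='Roboto100' and fullname='Roboto100 Bold'; with
  # style='Regular' and ends_in_regular=False the fullname stays 'Roboto100'.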
if font.family is None or font.style is None:
raise InvalidFontException('Font doesn\'t have proper family name or style')
if font.version is not None:
new_family = font.family + font.version
else:
new_family = font.family
  # compare with '==', not 'is': the style is parsed from XML at runtime
  if font.style == 'Regular' and not font.ends_in_regular:
font.fullname = new_family
else:
font.fullname = new_family + ' ' + font.style
font.family = new_family
return font
def ends_in_regular(string):
""" According to the specification, the font fullname should not end in
'Regular' for plain fonts. However, some fonts don't obey this rule. We
keep the style info, to minimize the diff. """
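  # e.g. (hypothetical inputs): 'Roboto Regular' -> last token 'Regular' ->
  # True; 'Roboto Bold' -> 'Bold' -> False.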
string = string.strip().split()[-1]
  return string == 'Regular'
def get_version(string):
# The string must begin with 'Version n.nn '
# to extract n.nn, we return the second entry in the split strings.
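  # e.g. (hypothetical input): 'Version 1.10' -> string.split()[1] == '1.10';
  # sanitize() then strips the dot, so get_version returns '110'.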
string = string.strip()
if not string.startswith('Version '):
raise InvalidFontException('mal-formed font version')
return sanitize(string.split()[1])
def sanitize(string):
return re.sub(r'[^\w-]+', '', string)
if __name__ == '__main__':
main(sys.argv[1:])
| gpl-3.0 | -5,051,433,683,380,599,000 | 31.153846 | 104 | 0.683762 | false |
arcivanov/pybuilder | src/integrationtest/python/smoke_setup_tests.py | 3 | 1045 | # -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2020 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from smoke_itest_support import SmokeIntegrationTestSupport
class SetupSmokeTest(SmokeIntegrationTestSupport):
PROJECT_FILES = list(SmokeIntegrationTestSupport.PROJECT_FILES) + ["setup.py"]
def test_smoke_setup_install(self):
self.smoke_test_module("pip", "-vvvvvvvvvvvvvv", "install", ".")
if __name__ == "__main__":
unittest.main()
| apache-2.0 | -3,214,572,679,451,836,000 | 31.65625 | 82 | 0.717703 | false |
neurodebian/htcondor | src/condor_contrib/condor_pigeon/src/condor_pigeon_client/skype_linux_tools/Skype4Py/Languages/pl.py | 10 | 7899 | apiAttachAvailable = u'API jest dost\u0119pne'
apiAttachNotAvailable = u'Niedost\u0119pny'
apiAttachPendingAuthorization = u'Autoryzacja w toku'
apiAttachRefused = u'Odmowa'
apiAttachSuccess = u'Sukces'
apiAttachUnknown = u'Nieznany'
budDeletedFriend = u'Usuni\u0119ty z listy znajomych'
budFriend = u'Znajomy'
budNeverBeenFriend = u'Nigdy nie by\u0142 na li\u015bcie znajomych'
budPendingAuthorization = u'Autoryzacja w toku'
budUnknown = u'Nieznany'
cfrBlockedByRecipient = u'Po\u0142\u0105czenie zablokowane przez odbiorc\u0119'
cfrMiscError = u'B\u0142\u0105d'
cfrNoCommonCodec = u'Brak podstawowego kodeka'
cfrNoProxyFound = u'Nie odnaleziono serwera proksy'
cfrNotAuthorizedByRecipient = u'Ten u\u017cytkownik nie ma autoryzacji odbiorcy'
cfrRecipientNotFriend = u'Odbiorca nie jest znajomym'
cfrRemoteDeviceError = u'Problem ze zdalnym urz\u0105dzeniem d\u017awi\u0119kowym'
cfrSessionTerminated = u'Sesja zako\u0144czona'
cfrSoundIOError = u'B\u0142\u0105d d\u017awi\u0119ku przychodz\u0105cego lub wychodz\u0105cego'
cfrSoundRecordingError = u'B\u0142\u0105d nagrywania d\u017awi\u0119ku'
cfrUnknown = u'Nieznany'
cfrUserDoesNotExist = u'Taki u\u017cytkownik lub numer telefonu nie istnieje'
cfrUserIsOffline = u'Ona lub On jest niedost\u0119pny'
chsAllCalls = u'Wszystkie'
chsDialog = u'Dialog'
chsIncomingCalls = u'Zaakceptuj wielu uczestnik\xf3w'
chsLegacyDialog = u'Dialog przestarza\u0142y'
chsMissedCalls = u'Nie odebrane'
chsMultiNeedAccept = u'Zaakceptuj wielu uczestnik\xf3w'
chsMultiSubscribed = u'Wielu subskrybowanych'
chsOutgoingCalls = u'Wielu subskrybowanych'
chsUnknown = u'Nieznany'
chsUnsubscribed = u'Nie jest abonentem'
clsBusy = u'Zaj\u0119te'
clsCancelled = u'Anulowane'
clsEarlyMedia = u'Odtwarzanie wczesnych medi\xf3w (Early Media)'
clsFailed = u'Niestety, nieudane po\u0142\u0105czenie!'
clsFinished = u'Zako\u0144czono'
clsInProgress = u'Rozmowa w toku'
clsLocalHold = u'Zawieszona przez u\u017cytkownika'
clsMissed = u'Nieodebrana rozmowa'
clsOnHold = u'Zawieszona'
clsRefused = u'Odmowa'
clsRemoteHold = u'Zawieszona przez odbiorc\u0119'
clsRinging = u'Dzwoni'
clsRouting = u'Trasowanie'
clsTransferred = u'Nieznany'
clsTransferring = u'Nieznany'
clsUnknown = u'Nieznany'
clsUnplaced = u'Nigdy nie \u0142aczono'
clsVoicemailBufferingGreeting = u'Pozdrowienia podczas buforowania'
clsVoicemailCancelled = u'Poczta g\u0142osowa anulowana'
clsVoicemailFailed = u'B\u0142\u0105d poczty g\u0142osowej'
clsVoicemailPlayingGreeting = u'Odtwarzanie pozdrowienia'
clsVoicemailRecording = u'Nagrywanie poczty g\u0142osowej'
clsVoicemailSent = u'Poczta g\u0142osowa wys\u0142ana'
clsVoicemailUploading = u'Wysy\u0142anie poczty g\u0142osowej'
cltIncomingP2P = u'Rozmowa przychodz\u0105ca peer-to-peer'
cltIncomingPSTN = u'Rozmowa przychodz\u0105ca'
cltOutgoingP2P = u'Rozmowa wychodz\u0105ca peer-to-peer'
cltOutgoingPSTN = u'Rozmowa wychodz\u0105ca'
cltUnknown = u'Nieznany'
cmeAddedMembers = u'Cz\u0142onkowie dodani'
cmeCreatedChatWith = u'Rozpocz\u0119ty czat z'
cmeEmoted = u'Emoted'
cmeLeft = u'Opusci\u0142'
cmeSaid = u'Powiedzia\u0142'
cmeSawMembers = u'Zobaczy\u0142e\u015b cz\u0142onk\xf3w'
cmeSetTopic = u'Ustaw temat'
cmeUnknown = u'Nieznany'
cmsRead = u'Przeczyta\u0142'
cmsReceived = u'Otrzyma\u0142'
cmsSending = u'Wysy\u0142am...'
cmsSent = u'Wys\u0142any'
cmsUnknown = u'Nieznany'
conConnecting = u'\u0141\u0105czenie'
conOffline = u'Niepod\u0142\u0105czony'
conOnline = u'Dost\u0119pny'
conPausing = u'Wstrzymane'
conUnknown = u'Nieznany'
cusAway = u'Zaraz wracam'
cusDoNotDisturb = u'Nie przeszkadza\u0107'
cusInvisible = u'Niewidoczny'
cusLoggedOut = u'Niepod\u0142\u0105czony'
cusNotAvailable = u'Niedost\u0119pny'
cusOffline = u'Niepod\u0142\u0105czony'
cusOnline = u'Dost\u0119pny'
cusSkypeMe = u"Tryb 'Skype Me'"
cusUnknown = u'Nieznany'
cvsBothEnabled = u'Odbierz i odbierz wideo'
cvsNone = u'Bez wideo'
cvsReceiveEnabled = u'Odbierz wideo'
cvsSendEnabled = u'Wy\u015blij wideo'
cvsUnknown = u'Nieznany'
grpAllFriends = u'Wszyscy znajomi'
grpAllUsers = u'Wszyscy u\u017cytkownicy'
grpCustomGroup = u'Niestandardowe'
grpOnlineFriends = u'Znajomi w sieci'
grpPendingAuthorizationFriends = u'Autoryzacja w toku'
grpProposedSharedGroup = u'Propozycja grupy wsp\xf3\u0142dzielonej'
grpRecentlyContactedUsers = u'Ostatnie kontakty'
grpSharedGroup = u'Wsp\xf3\u0142dzielona grupa'
grpSkypeFriends = u'Znajomi ze Skype'
grpSkypeOutFriends = u'Znajomi ze SkypeOut'
grpUngroupedFriends = u'Znajomi spoza grupy'
grpUnknown = u'Nieznany'
grpUsersAuthorizedByMe = u'Moja autoryzacja'
grpUsersBlockedByMe = u'Moja blokada'
grpUsersWaitingMyAuthorization = u'Pro\u015bba o autoryzacj\u0119'
leaAddDeclined = u'Dodawanie odrzucone'
leaAddedNotAuthorized = u'Osoba dodawana musi by\u0107 autoryzowana'
leaAdderNotFriend = u'Osoba dodaj\u0105ca musi by\u0107 znajomym'
leaUnknown = u'Nieznany'
leaUnsubscribe = u'Nie jest abonentem'
leaUserIncapable = u'U\u017cytkownik nie mo\u017ce rozmawia\u0107'
leaUserNotFound = u'U\u017cytkownik nie zosta\u0142 znaleziony'
olsAway = u'Zaraz wracam'
olsDoNotDisturb = u'Nie przeszkadza\u0107'
olsNotAvailable = u'Niedost\u0119pny'
olsOffline = u'Niepod\u0142\u0105czony'
olsOnline = u'Dost\u0119pny'
olsSkypeMe = u"Tryb 'Skype Me'"
olsSkypeOut = u'SkypeOut'
olsUnknown = u'Nieznany'
smsMessageStatusComposing = u'Tworzenie'
smsMessageStatusDelivered = u'Dor\u0119czona'
smsMessageStatusFailed = u'Nieudane'
smsMessageStatusRead = u'Read'
smsMessageStatusReceived = u'Otrzymany'
smsMessageStatusSendingToServer = u'Sending to Server'
smsMessageStatusSentToServer = u'Wys\u0142ana do serwera'
smsMessageStatusSomeTargetsFailed = u'Niekt\xf3re numery nieudane'
smsMessageStatusUnknown = u'Nieznany'
smsMessageTypeCCRequest = u'Pro\u015bba o kod potwierdzaj\u0105cy'
smsMessageTypeCCSubmit = u'Wys\u0142anie kodu potwierdzaj\u0105cego'
smsMessageTypeIncoming = u'Przychodz\u0105ca'
smsMessageTypeOutgoing = u'Outgoing'
smsMessageTypeUnknown = u'Unknown'
smsTargetStatusAcceptable = u'Akceptowalny'
smsTargetStatusAnalyzing = u'Analiza'
smsTargetStatusDeliveryFailed = u'Nieudane'
smsTargetStatusDeliveryPending = u'Oczekuje'
smsTargetStatusDeliverySuccessful = u'Dor\u0119czona'
smsTargetStatusNotRoutable = u'Brak trasy'
smsTargetStatusUndefined = u'Niezdefiniowana'
smsTargetStatusUnknown = u'Nieznany'
usexFemale = u'Kobieta'
usexMale = u'M\u0119\u017cczyzna'
usexUnknown = u'Nieznany'
vmrConnectError = u'B\u0142\u0105d po\u0142\u0105czenia'
vmrFileReadError = u'B\u0142\u0105d odczytu pliku'
vmrFileWriteError = u'B\u0142\u0105d zapisu pliku'
vmrMiscError = u'B\u0142\u0105d'
vmrNoError = u'Bez b\u0142\u0119du'
vmrNoPrivilege = u'Brak uprawnie\u0144 Voicemail'
vmrNoVoicemail = u'Taka poczta g\u0142osowa nie istnieje'
vmrPlaybackError = u'B\u0142\u0105d odtwarzania'
vmrRecordingError = u'B\u0142\u0105d nagrywania'
vmrUnknown = u'Nieznany'
vmsBlank = u'Pusty'
vmsBuffering = u'Buforowanie'
vmsDeleting = u'Usuwanie'
vmsDownloading = u'Trwa pobieranie'
vmsFailed = u'Nie powiodlo si\u0119'
vmsNotDownloaded = u'Niepobrany'
vmsPlayed = u'Odtworzony'
vmsPlaying = u'Odtwarzanie'
vmsRecorded = u'Nagrany'
vmsRecording = u'Nagrywanie poczty g\u0142osowej'
vmsUnknown = u'Nieznany'
vmsUnplayed = u'Nieodtworzony'
vmsUploaded = u'Przekazany'
vmsUploading = u'Przekazywanie'
vmtCustomGreeting = u'Pozdrowienia niestandardowe'
vmtDefaultGreeting = u'Pozdrowienia domy\u015blne'
vmtIncoming = u'przysy\u0142ana jest wiadomo\u015b\u0107 g\u0142osowa'
vmtOutgoing = u'Wychodz\u0105ca'
vmtUnknown = u'Nieznany'
vssAvailable = u'Dost\u0119pny'
vssNotAvailable = u'Niedostepny'
vssPaused = u'Wstrzymane'
vssRejected = u'Odrzucona'
vssRunning = u'Trwaj\u0105ca'
vssStarting = u'Rozpocz\u0119cie'
vssStopping = u'Zatrzymanie'
vssUnknown = u'Nieznany'
| apache-2.0 | -8,796,664,608,180,221,000 | 40.240642 | 95 | 0.791493 | false |
miptliot/edx-platform | common/djangoapps/terrain/stubs/catalog.py | 19 | 1690 | """
Stub implementation of catalog service for acceptance tests
"""
# pylint: disable=invalid-name, missing-docstring
import re
import urlparse
from .http import StubHttpRequestHandler, StubHttpService
class StubCatalogServiceHandler(StubHttpRequestHandler):
def do_GET(self):
pattern_handlers = {
r'/api/v1/programs/$': self.program_list,
r'/api/v1/programs/([0-9a-f-]+)/$': self.program_detail,
r'/api/v1/program_types/$': self.program_types,
}
if self.match_pattern(pattern_handlers):
return
self.send_response(404, content='404 Not Found')
def match_pattern(self, pattern_handlers):
"""
Find the correct handler method given the path info from the HTTP request.
"""
path = urlparse.urlparse(self.path).path
for pattern, handler in pattern_handlers.items():
match = re.match(pattern, path)
if match:
handler(*match.groups())
return True
def program_list(self):
"""Stub the catalog's program list endpoint."""
programs = self.server.config.get('catalog.programs', [])
self.send_json_response(programs)
def program_detail(self, program_uuid):
"""Stub the catalog's program detail endpoint."""
program = self.server.config.get('catalog.programs.' + program_uuid)
self.send_json_response(program)
def program_types(self):
program_types = self.server.config.get('catalog.programs_types', [])
self.send_json_response(program_types)
class StubCatalogService(StubHttpService):
HANDLER_CLASS = StubCatalogServiceHandler
| agpl-3.0 | -5,810,156,140,571,770,000 | 31.5 | 82 | 0.643195 | false |
firebase/grpc-SwiftPM | src/python/grpcio/grpc_core_dependencies.py | 1 | 48612 | # Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_core_dependencies.py.template`!!!
CORE_SOURCE_FILES = [
'src/core/ext/filters/census/grpc_context.cc',
'src/core/ext/filters/client_channel/backend_metric.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_channelz.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/global_subchannel_pool.cc',
'src/core/ext/filters/client_channel/health/health_check_client.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel_secure.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc',
'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc',
'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/cds.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/local_subchannel_pool.cc',
'src/core/ext/filters/client_channel/parse_address.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.cc',
'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc',
'src/core/ext/filters/client_channel/resolver/xds/xds_resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/resolver_result_parsing.cc',
'src/core/ext/filters/client_channel/resolving_lb_policy.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/server_address.cc',
'src/core/ext/filters/client_channel/service_config.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_pool_interface.cc',
'src/core/ext/filters/client_channel/xds/xds_api.cc',
'src/core/ext/filters/client_channel/xds/xds_bootstrap.cc',
'src/core/ext/filters/client_channel/xds/xds_channel_secure.cc',
'src/core/ext/filters/client_channel/xds/xds_client.cc',
'src/core/ext/filters/client_channel/xds/xds_client_stats.cc',
'src/core/ext/filters/client_idle/client_idle_filter.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/client_authority_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
'src/core/ext/filters/max_age/max_age_filter.cc',
'src/core/ext/filters/message_size/message_size_filter.cc',
'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc',
'src/core/ext/filters/workarounds/workaround_utils.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/transport/chttp2/client/authority.cc',
'src/core/ext/transport/chttp2/client/chttp2_connector.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc',
'src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc',
'src/core/ext/transport/chttp2/server/chttp2_server.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc',
'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/context_list.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/inproc/inproc_plugin.cc',
'src/core/ext/transport/inproc/inproc_transport.cc',
'src/core/ext/upb-generated/envoy/annotations/deprecation.upb.c',
'src/core/ext/upb-generated/envoy/annotations/resource.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/auth/cert.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cds.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cluster.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cluster/circuit_breaker.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cluster/filter.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cluster/outlier_detection.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/address.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/base.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/config_source.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/grpc_service.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/health_check.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/http_uri.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/protocol.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/discovery.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/eds.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/endpoint.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/endpoint/endpoint.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/endpoint/endpoint_components.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/endpoint/load_report.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/lds.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/listener.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/listener/listener.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/listener/listener_components.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/listener/udp_listener_config.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/rds.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/route.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/route/route.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/route/route_components.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/scoped_route.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/srds.upb.c',
'src/core/ext/upb-generated/envoy/config/filter/accesslog/v2/accesslog.upb.c',
'src/core/ext/upb-generated/envoy/config/filter/network/http_connection_manager/v2/http_connection_manager.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v2/api_listener.upb.c',
'src/core/ext/upb-generated/envoy/service/discovery/v2/ads.upb.c',
'src/core/ext/upb-generated/envoy/service/load_stats/v2/lrs.upb.c',
'src/core/ext/upb-generated/envoy/type/http.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/regex.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/string.upb.c',
'src/core/ext/upb-generated/envoy/type/metadata/v2/metadata.upb.c',
'src/core/ext/upb-generated/envoy/type/percent.upb.c',
'src/core/ext/upb-generated/envoy/type/range.upb.c',
'src/core/ext/upb-generated/envoy/type/semantic_version.upb.c',
'src/core/ext/upb-generated/envoy/type/tracing/v2/custom_tag.upb.c',
'src/core/ext/upb-generated/gogoproto/gogo.upb.c',
'src/core/ext/upb-generated/google/api/annotations.upb.c',
'src/core/ext/upb-generated/google/api/http.upb.c',
'src/core/ext/upb-generated/google/protobuf/any.upb.c',
'src/core/ext/upb-generated/google/protobuf/descriptor.upb.c',
'src/core/ext/upb-generated/google/protobuf/duration.upb.c',
'src/core/ext/upb-generated/google/protobuf/empty.upb.c',
'src/core/ext/upb-generated/google/protobuf/struct.upb.c',
'src/core/ext/upb-generated/google/protobuf/timestamp.upb.c',
'src/core/ext/upb-generated/google/protobuf/wrappers.upb.c',
'src/core/ext/upb-generated/google/rpc/status.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c',
'src/core/ext/upb-generated/udpa/annotations/migrate.upb.c',
'src/core/ext/upb-generated/udpa/annotations/sensitive.upb.c',
'src/core/ext/upb-generated/udpa/data/orca/v1/orca_load_report.upb.c',
'src/core/ext/upb-generated/validate/validate.upb.c',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channelz.cc',
'src/core/lib/channel/channelz_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_args.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/debug/trace.cc',
'src/core/lib/gpr/alloc.cc',
'src/core/lib/gpr/atm.cc',
'src/core/lib/gpr/cpu_iphone.cc',
'src/core/lib/gpr/cpu_linux.cc',
'src/core/lib/gpr/cpu_posix.cc',
'src/core/lib/gpr/cpu_windows.cc',
'src/core/lib/gpr/env_linux.cc',
'src/core/lib/gpr/env_posix.cc',
'src/core/lib/gpr/env_windows.cc',
'src/core/lib/gpr/log.cc',
'src/core/lib/gpr/log_android.cc',
'src/core/lib/gpr/log_linux.cc',
'src/core/lib/gpr/log_posix.cc',
'src/core/lib/gpr/log_windows.cc',
'src/core/lib/gpr/murmur_hash.cc',
'src/core/lib/gpr/string.cc',
'src/core/lib/gpr/string_posix.cc',
'src/core/lib/gpr/string_util_windows.cc',
'src/core/lib/gpr/string_windows.cc',
'src/core/lib/gpr/sync.cc',
'src/core/lib/gpr/sync_abseil.cc',
'src/core/lib/gpr/sync_posix.cc',
'src/core/lib/gpr/sync_windows.cc',
'src/core/lib/gpr/time.cc',
'src/core/lib/gpr/time_posix.cc',
'src/core/lib/gpr/time_precise.cc',
'src/core/lib/gpr/time_windows.cc',
'src/core/lib/gpr/tls_pthread.cc',
'src/core/lib/gpr/tmpfile_msys.cc',
'src/core/lib/gpr/tmpfile_posix.cc',
'src/core/lib/gpr/tmpfile_windows.cc',
'src/core/lib/gpr/wrap_memcpy.cc',
'src/core/lib/gprpp/arena.cc',
'src/core/lib/gprpp/fork.cc',
'src/core/lib/gprpp/global_config_env.cc',
'src/core/lib/gprpp/host_port.cc',
'src/core/lib/gprpp/mpscq.cc',
'src/core/lib/gprpp/thd_posix.cc',
'src/core/lib/gprpp/thd_windows.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/httpcli_security_connector.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/buffer_list.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/cfstream_handle.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_cfstream.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/error_cfstream.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/executor/mpmcqueue.cc',
'src/core/lib/iomgr/executor/threadpool.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_posix.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc',
'src/core/lib/iomgr/internal_errqueue.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_posix_cfstream.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/poller/eventmanager_libuv.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/sockaddr_utils.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_cfstream.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/iomgr/work_serializer.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/profiling/basic_timers.cc',
'src/core/lib/profiling/stap_timers.cc',
'src/core/lib/security/context/security_context.cc',
'src/core/lib/security/credentials/alts/alts_credentials.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc',
'src/core/lib/security/credentials/composite/composite_credentials.cc',
'src/core/lib/security/credentials/credentials.cc',
'src/core/lib/security/credentials/credentials_metadata.cc',
'src/core/lib/security/credentials/fake/fake_credentials.cc',
'src/core/lib/security/credentials/google_default/credentials_generic.cc',
'src/core/lib/security/credentials/google_default/google_default_credentials.cc',
'src/core/lib/security/credentials/iam/iam_credentials.cc',
'src/core/lib/security/credentials/jwt/json_token.cc',
'src/core/lib/security/credentials/jwt/jwt_credentials.cc',
'src/core/lib/security/credentials/jwt/jwt_verifier.cc',
'src/core/lib/security/credentials/local/local_credentials.cc',
'src/core/lib/security/credentials/oauth2/oauth2_credentials.cc',
'src/core/lib/security/credentials/plugin/plugin_credentials.cc',
'src/core/lib/security/credentials/ssl/ssl_credentials.cc',
'src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc',
'src/core/lib/security/credentials/tls/tls_credentials.cc',
'src/core/lib/security/security_connector/alts/alts_security_connector.cc',
'src/core/lib/security/security_connector/fake/fake_security_connector.cc',
'src/core/lib/security/security_connector/load_system_roots_fallback.cc',
'src/core/lib/security/security_connector/load_system_roots_linux.cc',
'src/core/lib/security/security_connector/local/local_security_connector.cc',
'src/core/lib/security/security_connector/security_connector.cc',
'src/core/lib/security/security_connector/ssl/ssl_security_connector.cc',
'src/core/lib/security/security_connector/ssl_utils.cc',
'src/core/lib/security/security_connector/ssl_utils_config.cc',
'src/core/lib/security/security_connector/tls/tls_security_connector.cc',
'src/core/lib/security/transport/client_auth_filter.cc',
'src/core/lib/security/transport/secure_endpoint.cc',
'src/core/lib/security/transport/security_handshaker.cc',
'src/core/lib/security/transport/server_auth_filter.cc',
'src/core/lib/security/transport/target_authority_table.cc',
'src/core/lib/security/transport/tsi_error.cc',
'src/core/lib/security/util/json_util.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/init.cc',
'src/core/lib/surface/init_secure.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/uri/uri_parser.cc',
'src/core/plugin_registry/grpc_plugin_registry.cc',
'src/core/tsi/alts/crypt/aes_gcm.cc',
'src/core/tsi/alts/crypt/gsec.cc',
'src/core/tsi/alts/frame_protector/alts_counter.cc',
'src/core/tsi/alts/frame_protector/alts_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_frame_protector.cc',
'src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.cc',
'src/core/tsi/alts/frame_protector/alts_seal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_unseal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/frame_handler.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_client.cc',
'src/core/tsi/alts/handshaker/alts_shared_resource.cc',
'src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc',
'src/core/tsi/alts/handshaker/alts_tsi_utils.cc',
'src/core/tsi/alts/handshaker/transport_security_common_api.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.cc',
'src/core/tsi/fake_transport_security.cc',
'src/core/tsi/local_transport_security.cc',
'src/core/tsi/ssl/session_cache/ssl_session_boringssl.cc',
'src/core/tsi/ssl/session_cache/ssl_session_cache.cc',
'src/core/tsi/ssl/session_cache/ssl_session_openssl.cc',
'src/core/tsi/ssl_transport_security.cc',
'src/core/tsi/transport_security.cc',
'src/core/tsi/transport_security_grpc.cc',
'third_party/abseil-cpp/absl/base/dynamic_annotations.cc',
'third_party/abseil-cpp/absl/base/internal/cycleclock.cc',
'third_party/abseil-cpp/absl/base/internal/raw_logging.cc',
'third_party/abseil-cpp/absl/base/internal/spinlock.cc',
'third_party/abseil-cpp/absl/base/internal/spinlock_wait.cc',
'third_party/abseil-cpp/absl/base/internal/sysinfo.cc',
'third_party/abseil-cpp/absl/base/internal/thread_identity.cc',
'third_party/abseil-cpp/absl/base/internal/throw_delegate.cc',
'third_party/abseil-cpp/absl/base/internal/unscaledcycleclock.cc',
'third_party/abseil-cpp/absl/base/log_severity.cc',
'third_party/abseil-cpp/absl/numeric/int128.cc',
'third_party/abseil-cpp/absl/strings/ascii.cc',
'third_party/abseil-cpp/absl/strings/charconv.cc',
'third_party/abseil-cpp/absl/strings/escaping.cc',
'third_party/abseil-cpp/absl/strings/internal/charconv_bigint.cc',
'third_party/abseil-cpp/absl/strings/internal/charconv_parse.cc',
'third_party/abseil-cpp/absl/strings/internal/escaping.cc',
'third_party/abseil-cpp/absl/strings/internal/memutil.cc',
'third_party/abseil-cpp/absl/strings/internal/ostringstream.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/arg.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/bind.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/extension.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/float_conversion.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/output.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/parser.cc',
'third_party/abseil-cpp/absl/strings/internal/utf8.cc',
'third_party/abseil-cpp/absl/strings/match.cc',
'third_party/abseil-cpp/absl/strings/numbers.cc',
'third_party/abseil-cpp/absl/strings/str_cat.cc',
'third_party/abseil-cpp/absl/strings/str_replace.cc',
'third_party/abseil-cpp/absl/strings/str_split.cc',
'third_party/abseil-cpp/absl/strings/string_view.cc',
'third_party/abseil-cpp/absl/strings/substitute.cc',
'third_party/abseil-cpp/absl/types/bad_optional_access.cc',
'third_party/address_sorting/address_sorting.c',
'third_party/address_sorting/address_sorting_posix.c',
'third_party/address_sorting/address_sorting_windows.c',
'third_party/boringssl-with-bazel/err_data.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bitstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bool.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_d2i_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_dup.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_enum.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_gentm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_i2d_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_mbstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_object.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_octet.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_print.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_strnid.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_time.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_type.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_utctm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_utf8.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_lib.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_par.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn_pack.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_enum.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_string.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_dec.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_enc.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_fre.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_new.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_typ.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_utl.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/time_support.c',
'third_party/boringssl-with-bazel/src/crypto/base64/base64.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio_mem.c',
'third_party/boringssl-with-bazel/src/crypto/bio/connect.c',
'third_party/boringssl-with-bazel/src/crypto/bio/fd.c',
'third_party/boringssl-with-bazel/src/crypto/bio/file.c',
'third_party/boringssl-with-bazel/src/crypto/bio/hexdump.c',
'third_party/boringssl-with-bazel/src/crypto/bio/pair.c',
'third_party/boringssl-with-bazel/src/crypto/bio/printf.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket_helper.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/bn_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/convert.c',
'third_party/boringssl-with-bazel/src/crypto/buf/buf.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/asn1_compat.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/ber.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbb.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbs.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/unicode.c',
'third_party/boringssl-with-bazel/src/crypto/chacha/chacha.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/cipher_extra.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/derive_key.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesccm.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesctrhmac.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesgcmsiv.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_chacha20poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_null.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc2.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc4.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_tls.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/tls_cbc.c',
'third_party/boringssl-with-bazel/src/crypto/cmac/cmac.c',
'third_party/boringssl-with-bazel/src/crypto/conf/conf.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-aarch64-fuchsia.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-aarch64-linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-arm-linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-arm.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-intel.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-ppc64le.c',
'third_party/boringssl-with-bazel/src/crypto/crypto.c',
'third_party/boringssl-with-bazel/src/crypto/curve25519/spake25519.c',
'third_party/boringssl-with-bazel/src/crypto/dh/check.c',
'third_party/boringssl-with-bazel/src/crypto/dh/dh.c',
'third_party/boringssl-with-bazel/src/crypto/dh/dh_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/dh/params.c',
'third_party/boringssl-with-bazel/src/crypto/digest_extra/digest_extra.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_derive.c',
'third_party/boringssl-with-bazel/src/crypto/ecdh_extra/ecdh_extra.c',
'third_party/boringssl-with-bazel/src/crypto/ecdsa_extra/ecdsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/engine/engine.c',
'third_party/boringssl-with-bazel/src/crypto/err/err.c',
'third_party/boringssl-with-bazel/src/crypto/evp/digestsign.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_ctx.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/pbkdf.c',
'third_party/boringssl-with-bazel/src/crypto/evp/print.c',
'third_party/boringssl-with-bazel/src/crypto/evp/scrypt.c',
'third_party/boringssl-with-bazel/src/crypto/evp/sign.c',
'third_party/boringssl-with-bazel/src/crypto/ex_data.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/bcm.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/fips_shared_support.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/is_fips.c',
'third_party/boringssl-with-bazel/src/crypto/hkdf/hkdf.c',
'third_party/boringssl-with-bazel/src/crypto/hrss/hrss.c',
'third_party/boringssl-with-bazel/src/crypto/lhash/lhash.c',
'third_party/boringssl-with-bazel/src/crypto/mem.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj_xref.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_all.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_info.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_lib.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_oth.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pk8.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_xaux.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/p5_pbev2.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8_x509.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_arm.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_vec.c',
'third_party/boringssl-with-bazel/src/crypto/pool/pool.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/deterministic.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/forkunsafe.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/fuchsia.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/rand_extra.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/windows.c',
'third_party/boringssl-with-bazel/src/crypto/rc4/rc4.c',
'third_party/boringssl-with-bazel/src/crypto/refcount_c11.c',
'third_party/boringssl-with-bazel/src/crypto/refcount_lock.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_print.c',
'third_party/boringssl-with-bazel/src/crypto/siphash/siphash.c',
'third_party/boringssl-with-bazel/src/crypto/stack/stack.c',
'third_party/boringssl-with-bazel/src/crypto/thread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_none.c',
'third_party/boringssl-with-bazel/src/crypto/thread_pthread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_win.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_digest.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_sign.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_strex.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_verify.c',
'third_party/boringssl-with-bazel/src/crypto/x509/algorithm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/asn1_gen.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_dir.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_file.c',
'third_party/boringssl-with-bazel/src/crypto/x509/i2d_pr.c',
'third_party/boringssl-with-bazel/src/crypto/x509/rsa_pss.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_att.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_cmp.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_d2.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_def.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_ext.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_lu.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_obj.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_r2x.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_set.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_trs.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_txt.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_v3.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vfy.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vpm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509cset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509rset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_algor.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_all.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_attrib.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_exten.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pubkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_sig.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_val.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_cache.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_data.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_lib.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_map.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_node.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_tree.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akeya.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_alt.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bitst.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_conf.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_cpols.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_crld.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_enum.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_extku.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_genn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ia5.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_int.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_lib.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ncons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ocsp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pci.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pcia.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pku.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pmaps.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_prn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_purp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_skey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_sxnet.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_utl.c',
'third_party/boringssl-with-bazel/src/ssl/bio_ssl.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_both.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_srtp.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_record.cc',
'third_party/boringssl-with-bazel/src/ssl/handoff.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_client.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_server.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_both.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_aead_ctx.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_asn1.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_buffer.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cert.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cipher.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_file.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_key_share.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_privkey.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_session.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_stat.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_transcript.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_versions.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_x509.cc',
'third_party/boringssl-with-bazel/src/ssl/t1_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/t1_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_both.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_client.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_server.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_record.cc',
'third_party/boringssl-with-bazel/src/third_party/fiat/curve25519.c',
'third_party/cares/cares/ares__close_sockets.c',
'third_party/cares/cares/ares__get_hostent.c',
'third_party/cares/cares/ares__read_line.c',
'third_party/cares/cares/ares__timeval.c',
'third_party/cares/cares/ares_cancel.c',
'third_party/cares/cares/ares_create_query.c',
'third_party/cares/cares/ares_data.c',
'third_party/cares/cares/ares_destroy.c',
'third_party/cares/cares/ares_expand_name.c',
'third_party/cares/cares/ares_expand_string.c',
'third_party/cares/cares/ares_fds.c',
'third_party/cares/cares/ares_free_hostent.c',
'third_party/cares/cares/ares_free_string.c',
'third_party/cares/cares/ares_getenv.c',
'third_party/cares/cares/ares_gethostbyaddr.c',
'third_party/cares/cares/ares_gethostbyname.c',
'third_party/cares/cares/ares_getnameinfo.c',
'third_party/cares/cares/ares_getopt.c',
'third_party/cares/cares/ares_getsock.c',
'third_party/cares/cares/ares_init.c',
'third_party/cares/cares/ares_library_init.c',
'third_party/cares/cares/ares_llist.c',
'third_party/cares/cares/ares_mkquery.c',
'third_party/cares/cares/ares_nowarn.c',
'third_party/cares/cares/ares_options.c',
'third_party/cares/cares/ares_parse_a_reply.c',
'third_party/cares/cares/ares_parse_aaaa_reply.c',
'third_party/cares/cares/ares_parse_mx_reply.c',
'third_party/cares/cares/ares_parse_naptr_reply.c',
'third_party/cares/cares/ares_parse_ns_reply.c',
'third_party/cares/cares/ares_parse_ptr_reply.c',
'third_party/cares/cares/ares_parse_soa_reply.c',
'third_party/cares/cares/ares_parse_srv_reply.c',
'third_party/cares/cares/ares_parse_txt_reply.c',
'third_party/cares/cares/ares_platform.c',
'third_party/cares/cares/ares_process.c',
'third_party/cares/cares/ares_query.c',
'third_party/cares/cares/ares_search.c',
'third_party/cares/cares/ares_send.c',
'third_party/cares/cares/ares_strcasecmp.c',
'third_party/cares/cares/ares_strdup.c',
'third_party/cares/cares/ares_strerror.c',
'third_party/cares/cares/ares_strsplit.c',
'third_party/cares/cares/ares_timeout.c',
'third_party/cares/cares/ares_version.c',
'third_party/cares/cares/ares_writev.c',
'third_party/cares/cares/bitncmp.c',
'third_party/cares/cares/inet_net_pton.c',
'third_party/cares/cares/inet_ntop.c',
'third_party/cares/cares/windows_port.c',
'third_party/upb/upb/decode.c',
'third_party/upb/upb/encode.c',
'third_party/upb/upb/msg.c',
'third_party/upb/upb/port.c',
'third_party/upb/upb/table.c',
'third_party/upb/upb/upb.c',
'third_party/zlib/adler32.c',
'third_party/zlib/compress.c',
'third_party/zlib/crc32.c',
'third_party/zlib/deflate.c',
'third_party/zlib/gzclose.c',
'third_party/zlib/gzlib.c',
'third_party/zlib/gzread.c',
'third_party/zlib/gzwrite.c',
'third_party/zlib/infback.c',
'third_party/zlib/inffast.c',
'third_party/zlib/inflate.c',
'third_party/zlib/inftrees.c',
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
]
| apache-2.0 | -1,551,413,842,074,994,000 | 57.639324 | 118 | 0.723422 | false |
Yukarumya/Yukarum-Redfoxes | python/mozbuild/mozbuild/frontend/mach_commands.py | 2 | 7822 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from collections import defaultdict
import os
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
SubCommand,
)
from mozbuild.base import MachCommandBase
import mozpack.path as mozpath
class InvalidPathException(Exception):
"""Represents an error due to an invalid path."""
@CommandProvider
class MozbuildFileCommands(MachCommandBase):
@Command('mozbuild-reference', category='build-dev',
description='View reference documentation on mozbuild files.')
@CommandArgument('symbol', default=None, nargs='*',
help='Symbol to view help on. If not specified, all will be shown.')
@CommandArgument('--name-only', '-n', default=False, action='store_true',
help='Print symbol names only.')
def reference(self, symbol, name_only=False):
# mozbuild.sphinx imports some Sphinx modules, so we need to be sure
# the optional Sphinx package is installed.
self._activate_virtualenv()
self.virtualenv_manager.install_pip_package('Sphinx==1.1.3')
from mozbuild.sphinx import (
format_module,
function_reference,
special_reference,
variable_reference,
)
import mozbuild.frontend.context as m
if name_only:
for s in sorted(m.VARIABLES.keys()):
print(s)
for s in sorted(m.FUNCTIONS.keys()):
print(s)
for s in sorted(m.SPECIAL_VARIABLES.keys()):
print(s)
return 0
if len(symbol):
for s in symbol:
if s in m.VARIABLES:
for line in variable_reference(s, *m.VARIABLES[s]):
print(line)
continue
elif s in m.FUNCTIONS:
for line in function_reference(s, *m.FUNCTIONS[s]):
print(line)
continue
elif s in m.SPECIAL_VARIABLES:
for line in special_reference(s, *m.SPECIAL_VARIABLES[s]):
print(line)
continue
print('Could not find symbol: %s' % s)
return 1
return 0
for line in format_module(m):
print(line)
return 0
@Command('file-info', category='build-dev',
description='Query for metadata about files.')
def file_info(self):
"""Show files metadata derived from moz.build files.
moz.build files contain "Files" sub-contexts for declaring metadata
against file patterns. This command suite is used to query that data.
"""
@SubCommand('file-info', 'bugzilla-component',
'Show Bugzilla component info for files listed.')
@CommandArgument('-r', '--rev',
help='Version control revision to look up info from')
@CommandArgument('paths', nargs='+',
help='Paths whose data to query')
def file_info_bugzilla(self, paths, rev=None):
"""Show Bugzilla component for a set of files.
Given a requested set of files (which can be specified using
wildcards), print the Bugzilla component for each file.
"""
components = defaultdict(set)
try:
for p, m in self._get_files_info(paths, rev=rev).items():
components[m.get('BUG_COMPONENT')].add(p)
except InvalidPathException as e:
print(e.message)
return 1
        for component, files in sorted(components.items(), key=lambda x: (x[0] is None, x)):
print('%s :: %s' % (component.product, component.component) if component else 'UNKNOWN')
for f in sorted(files):
print(' %s' % f)
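    # A hypothetical invocation and output, for illustration only (the path
    # and the component shown are assumptions, not taken from the source):
    #   $ ./mach file-info bugzilla-component python/mozbuild/mozbuild/frontend/mach_commands.py
    #   Firefox Build System :: General
    #     python/mozbuild/mozbuild/frontend/mach_commands.py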
@SubCommand('file-info', 'missing-bugzilla',
'Show files missing Bugzilla component info')
@CommandArgument('-r', '--rev',
help='Version control revision to look up info from')
@CommandArgument('paths', nargs='+',
help='Paths whose data to query')
def file_info_missing_bugzilla(self, paths, rev=None):
try:
for p, m in sorted(self._get_files_info(paths, rev=rev).items()):
if 'BUG_COMPONENT' not in m:
print(p)
except InvalidPathException as e:
print(e.message)
return 1
@SubCommand('file-info', 'dep-tests',
'Show test files marked as dependencies of these source files.')
@CommandArgument('-r', '--rev',
help='Version control revision to look up info from')
@CommandArgument('paths', nargs='+',
help='Paths whose data to query')
def file_info_test_deps(self, paths, rev=None):
try:
for p, m in self._get_files_info(paths, rev=rev).items():
print('%s:' % mozpath.relpath(p, self.topsrcdir))
if m.test_files:
print('\tTest file patterns:')
for p in m.test_files:
print('\t\t%s' % p)
if m.test_tags:
print('\tRelevant tags:')
for p in m.test_tags:
print('\t\t%s' % p)
if m.test_flavors:
print('\tRelevant flavors:')
for p in m.test_flavors:
print('\t\t%s' % p)
except InvalidPathException as e:
print(e.message)
return 1
def _get_reader(self, finder):
from mozbuild.frontend.reader import (
BuildReader,
EmptyConfig,
)
config = EmptyConfig(self.topsrcdir)
return BuildReader(config, finder=finder)
def _get_files_info(self, paths, rev=None):
from mozbuild.frontend.reader import default_finder
from mozpack.files import FileFinder, MercurialRevisionFinder
# Normalize to relative from topsrcdir.
relpaths = []
for p in paths:
a = mozpath.abspath(p)
if not mozpath.basedir(a, [self.topsrcdir]):
raise InvalidPathException('path is outside topsrcdir: %s' % p)
relpaths.append(mozpath.relpath(a, self.topsrcdir))
repo = None
if rev:
hg_path = os.path.join(self.topsrcdir, '.hg')
if not os.path.exists(hg_path):
raise InvalidPathException('a Mercurial repo is required '
'when specifying a revision')
repo = self.topsrcdir
# We need two finders because the reader's finder operates on
# absolute paths.
finder = FileFinder(self.topsrcdir, find_executables=False)
if repo:
reader_finder = MercurialRevisionFinder(repo, rev=rev,
recognize_repo_paths=True)
else:
reader_finder = default_finder
# Expand wildcards.
allpaths = []
for p in relpaths:
if '*' not in p:
if p not in allpaths:
allpaths.append(p)
continue
if repo:
raise InvalidPathException('cannot use wildcard in version control mode')
for path, f in finder.find(p):
if path not in allpaths:
allpaths.append(path)
reader = self._get_reader(finder=reader_finder)
return reader.files_info(allpaths)
| mpl-2.0 | -4,686,500,835,082,777,000 | 34.880734 | 100 | 0.557914 | false |
probml/pyprobml | scripts/svi_gmm_tfp_scratch.py | 1 | 7626 | # SVI for a GMM
# Modified from
# https://github.com/brendanhasz/svi-gaussian-mixture-model/blob/master/BayesianGaussianMixtureModel.ipynb
#pip install tf-nightly
#pip install --upgrade tfp-nightly -q
# Imports
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
from time import time
# Plot settings
#%config InlineBackend.figure_format = 'svg'
# Random seed
np.random.seed(12345)
tf.random.set_seed(12345)
# Generate some data
N = 3000
X = np.random.randn(N, 2).astype('float32')
X[:1000, :] += [2, 0]
X[1000:2000, :] -= [2, 4]
X[2000:, :] += [-2, 4]
# Plot the data
plt.plot(X[:, 0], X[:, 1], '.')
plt.axis('equal')
plt.show()
# Make a TensorFlow Dataset from that data
batch_size = 500
dataset = tf.data.Dataset.from_tensor_slices(
(X)).shuffle(10000).batch(batch_size)
class GaussianMixtureModel(tf.keras.Model):
"""A Bayesian Gaussian mixture model.
Assumes Gaussians' variances in each dimension are independent.
Parameters
----------
Nc : int > 0
Number of mixture components.
Nd : int > 0
Number of dimensions.
"""
def __init__(self, Nc, Nd):
# Initialize
super(GaussianMixtureModel, self).__init__()
self.Nc = Nc
self.Nd = Nd
# Variational distribution variables for means
self.locs = tf.Variable(tf.random.normal((Nc, Nd)))
self.scales = tf.Variable(tf.pow(tf.random.gamma((Nc, Nd), 5, 5), -0.5))
# Variational distribution variables for standard deviations
self.alpha = tf.Variable(tf.random.uniform((Nc, Nd), 4., 6.))
self.beta = tf.Variable(tf.random.uniform((Nc, Nd), 4., 6.))
# Variational distribution variables for component weights
self.counts = tf.Variable(2*tf.ones((Nc,)))
# Prior distributions for the means
self.mu_prior = tfd.Normal(tf.zeros((Nc, Nd)), tf.ones((Nc, Nd)))
# Prior distributions for the standard deviations
self.sigma_prior = tfd.Gamma(5*tf.ones((Nc, Nd)), 5*tf.ones((Nc, Nd)))
# Prior distributions for the component weights
self.theta_prior = tfd.Dirichlet(2*tf.ones((Nc,)))
def call(self, x, sampling=True, independent=True):
"""Compute losses given a batch of data.
Parameters
----------
x : tf.Tensor
A batch of data
sampling : bool
Whether to sample from the variational posterior
distributions (if True, the default), or just use the
mean of the variational distributions (if False).
Returns
-------
log_likelihoods : tf.Tensor
Log likelihood for each sample
kl_sum : tf.Tensor
Sum of the KL divergences between the variational
distributions and their priors
"""
# The variational distributions
mu = tfd.Normal(self.locs, self.scales)
sigma = tfd.Gamma(self.alpha, self.beta)
theta = tfd.Dirichlet(self.counts)
# Sample from the variational distributions
if sampling:
Nb = x.shape[0] #number of samples in the batch
mu_sample = mu.sample(Nb)
sigma_sample = tf.pow(sigma.sample(Nb), -0.5)
theta_sample = theta.sample(Nb)
else:
mu_sample = tf.reshape(mu.mean(), (1, self.Nc, self.Nd))
sigma_sample = tf.pow(tf.reshape(sigma.mean(), (1, self.Nc, self.Nd)), -0.5)
theta_sample = tf.reshape(theta.mean(), (1, self.Nc))
# The mixture density
density = tfd.Mixture(
cat=tfd.Categorical(probs=theta_sample),
components=[
tfd.MultivariateNormalDiag(loc=mu_sample[:, i, :],
scale_diag=sigma_sample[:, i, :])
for i in range(self.Nc)])
# Compute the mean log likelihood
log_likelihoods = density.log_prob(x)
# Compute the KL divergence sum
mu_div = tf.reduce_sum(tfd.kl_divergence(mu, self.mu_prior))
sigma_div = tf.reduce_sum(tfd.kl_divergence(sigma, self.sigma_prior))
theta_div = tf.reduce_sum(tfd.kl_divergence(theta, self.theta_prior))
kl_sum = mu_div + sigma_div + theta_div
# Return both losses
return log_likelihoods, kl_sum
def fit(self, data, nepochs):
optimizer = tf.keras.optimizers.Adam(lr=1e-3)
@tf.function
        def train_step(batch):
            with tf.GradientTape() as tape:
                log_likelihoods, kl_sum = self(batch)
                elbo_loss = kl_sum/N - tf.reduce_mean(log_likelihoods)  # N = total dataset size (module-level)
            gradients = tape.gradient(elbo_loss, self.trainable_variables)
            optimizer.apply_gradients(zip(gradients, self.trainable_variables))
        for epoch in range(nepochs):
            for batch in data:  # iterate the dataset passed in, not the global
                train_step(batch)
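# Note: the training loss used throughout this script is the negative ELBO
# per data point,
#   loss = KL[q || p] / N - mean_batch log p(x | params),
# so dividing the KL term by the dataset size N keeps the prior's weight
# constant no matter how the data are batched.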
def gmm_fit(model, data, nepochs):
optimizer = tf.keras.optimizers.Adam(lr=1e-3)
@tf.function
    def train_step(batch):
        with tf.GradientTape() as tape:
            log_likelihoods, kl_sum = model(batch)
            elbo_loss = kl_sum/N - tf.reduce_mean(log_likelihoods)
        gradients = tape.gradient(elbo_loss, model.trainable_variables)
        optimizer.apply_gradients(zip(gradients, model.trainable_variables))
    for epoch in range(nepochs):
        for batch in data:  # use the function argument, not the global dataset
            train_step(batch)
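# A hypothetical helper, not part of the original script: hard-assign each
# point to a mixture component using the means of the fitted variational
# posteriors. The function name and the use of posterior means (rather than
# samples) are illustrative assumptions.
def assign_clusters(model, x):
    mu = model.locs                                      # (Nc, Nd) posterior mean locations
    sigma = tf.pow(model.alpha / model.beta, -0.5)       # Gamma mean alpha/beta is the precision
    theta = model.counts / tf.reduce_sum(model.counts)   # Dirichlet mean weights
    comps = [tfd.MultivariateNormalDiag(loc=mu[i], scale_diag=sigma[i])
             for i in range(model.Nc)]
    # Unnormalized log responsibility of each component for each point.
    log_p = tf.stack([tf.math.log(theta[i]) + comps[i].log_prob(x)
                      for i in range(model.Nc)], axis=1)
    return tf.argmax(log_p, axis=1)                      # (N,) component indices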
nepochs = 1000
nmix = 3
ndim = 2
model = GaussianMixtureModel(nmix, ndim)
### Fitting
time_start = time()
method = 3
if method == 1:
model.fit(dataset, nepochs)
if method == 2:
gmm_fit(model, dataset, nepochs)
if method == 3:
# Relies on 'model' and 'optimizer' being in scope = yuk!
model = GaussianMixtureModel(nmix, ndim)
optimizer = tf.keras.optimizers.Adam(lr=1e-3)
@tf.function
def train_step(data):
with tf.GradientTape() as tape:
log_likelihoods, kl_sum = model(data)
elbo_loss = kl_sum/N - tf.reduce_mean(log_likelihoods)
gradients = tape.gradient(elbo_loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
for epoch in range(nepochs):
for data in dataset:
train_step(data)
elapsed_time = (time() - time_start)
print('method {}'.format(method))
print(elapsed_time)
### Evaluation
# Compute log likelihood at each point on a grid
Np = 100 #number of grid points
Xp, Yp = np.meshgrid(np.linspace(-6, 6, Np), np.linspace(-6, 6, Np))
Pp = np.column_stack([Xp.flatten(), Yp.flatten()]).astype('float32')
Z, _ = model(Pp, sampling=False)
Z = np.reshape(Z, (Np, Np))
# Show the fit mixture density
plt.figure()
plt.imshow(np.exp(Z),
extent=(-6, 6, -6, 6),
origin='lower')
cbar = plt.colorbar()
cbar.ax.set_ylabel('Likelihood')
model.locs
model.trainable_variables
# Sample from the std deviation variational posterior
stds = tf.pow(tfd.Gamma(model.alpha, model.beta).sample(10000), -0.5)
# Plot the samples
plt.figure()
sns.distplot(stds[:, 0, 0])
# Sample from the mean variational posterior
means = tfd.Normal(model.locs, model.scales).sample(10000)
# Plot the mean samples for a single
plt.figure()
sns.kdeplot(means[:, 0, 0].numpy(),
means[:, 0, 1].numpy(),
n_levels=10)
| mit | 3,546,635,113,897,697,000 | 30.254098 | 106 | 0.600839 | false |
blacklin/kbengine | kbe/res/scripts/common/Lib/concurrent/futures/_base.py | 88 | 19638 | # Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
__author__ = 'Brian Quinlan ([email protected])'
import collections
import logging
import threading
import time
FIRST_COMPLETED = 'FIRST_COMPLETED'
FIRST_EXCEPTION = 'FIRST_EXCEPTION'
ALL_COMPLETED = 'ALL_COMPLETED'
_AS_COMPLETED = '_AS_COMPLETED'
# Possible future states (for internal use by the futures package).
PENDING = 'PENDING'
RUNNING = 'RUNNING'
# The future was cancelled by the user...
CANCELLED = 'CANCELLED'
# ...and _Waiter.add_cancelled() was called by a worker.
CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
FINISHED = 'FINISHED'
_FUTURE_STATES = [
PENDING,
RUNNING,
CANCELLED,
CANCELLED_AND_NOTIFIED,
FINISHED
]
_STATE_TO_DESCRIPTION_MAP = {
PENDING: "pending",
RUNNING: "running",
CANCELLED: "cancelled",
CANCELLED_AND_NOTIFIED: "cancelled",
FINISHED: "finished"
}
# Logger for internal use by the futures package.
LOGGER = logging.getLogger("concurrent.futures")
class Error(Exception):
"""Base class for all future-related exceptions."""
pass
class CancelledError(Error):
"""The Future was cancelled."""
pass
class TimeoutError(Error):
"""The operation exceeded the given deadline."""
pass
class _Waiter(object):
"""Provides the event that wait() and as_completed() block on."""
def __init__(self):
self.event = threading.Event()
self.finished_futures = []
def add_result(self, future):
self.finished_futures.append(future)
def add_exception(self, future):
self.finished_futures.append(future)
def add_cancelled(self, future):
self.finished_futures.append(future)
class _AsCompletedWaiter(_Waiter):
"""Used by as_completed()."""
def __init__(self):
super(_AsCompletedWaiter, self).__init__()
self.lock = threading.Lock()
def add_result(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_result(future)
self.event.set()
def add_exception(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_exception(future)
self.event.set()
def add_cancelled(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_cancelled(future)
self.event.set()
class _FirstCompletedWaiter(_Waiter):
"""Used by wait(return_when=FIRST_COMPLETED)."""
def add_result(self, future):
super().add_result(future)
self.event.set()
def add_exception(self, future):
super().add_exception(future)
self.event.set()
def add_cancelled(self, future):
super().add_cancelled(future)
self.event.set()
class _AllCompletedWaiter(_Waiter):
"""Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""
def __init__(self, num_pending_calls, stop_on_exception):
self.num_pending_calls = num_pending_calls
self.stop_on_exception = stop_on_exception
self.lock = threading.Lock()
super().__init__()
def _decrement_pending_calls(self):
with self.lock:
self.num_pending_calls -= 1
if not self.num_pending_calls:
self.event.set()
def add_result(self, future):
super().add_result(future)
self._decrement_pending_calls()
def add_exception(self, future):
super().add_exception(future)
if self.stop_on_exception:
self.event.set()
else:
self._decrement_pending_calls()
def add_cancelled(self, future):
super().add_cancelled(future)
self._decrement_pending_calls()
class _AcquireFutures(object):
"""A context manager that does an ordered acquire of Future conditions."""
def __init__(self, futures):
self.futures = sorted(futures, key=id)
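        # Sorting by id() gives every acquirer a consistent lock order,
        # which prevents lock-ordering deadlocks between concurrent waiters.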
def __enter__(self):
for future in self.futures:
future._condition.acquire()
def __exit__(self, *args):
for future in self.futures:
future._condition.release()
def _create_and_install_waiters(fs, return_when):
if return_when == _AS_COMPLETED:
waiter = _AsCompletedWaiter()
elif return_when == FIRST_COMPLETED:
waiter = _FirstCompletedWaiter()
else:
pending_count = sum(
f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs)
if return_when == FIRST_EXCEPTION:
waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True)
elif return_when == ALL_COMPLETED:
waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False)
else:
raise ValueError("Invalid return condition: %r" % return_when)
for f in fs:
f._waiters.append(waiter)
return waiter
def as_completed(fs, timeout=None):
"""An iterator over the given futures that yields each as it completes.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
iterate over.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
Returns:
An iterator that yields the given Futures as they complete (finished or
cancelled). If any given Futures are duplicated, they will be returned
once.
Raises:
TimeoutError: If the entire result iterator could not be generated
before the given timeout.
"""
if timeout is not None:
end_time = timeout + time.time()
fs = set(fs)
with _AcquireFutures(fs):
finished = set(
f for f in fs
if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
pending = fs - finished
waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
try:
yield from finished
while pending:
if timeout is None:
wait_timeout = None
else:
wait_timeout = end_time - time.time()
if wait_timeout < 0:
raise TimeoutError(
'%d (of %d) futures unfinished' % (
len(pending), len(fs)))
waiter.event.wait(wait_timeout)
with waiter.lock:
finished = waiter.finished_futures
waiter.finished_futures = []
waiter.event.clear()
for future in finished:
yield future
pending.remove(future)
finally:
for f in fs:
with f._condition:
f._waiters.remove(waiter)
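# Illustrative usage sketch, not part of the original module; the executor
# choice and the submitted function are assumptions made for the example.
def _example_as_completed_usage():
    from concurrent.futures import ThreadPoolExecutor
    with ThreadPoolExecutor(max_workers=4) as executor:
        fs = [executor.submit(pow, 2, n) for n in range(8)]
        # Results arrive in completion order, not submission order.
        return [f.result() for f in as_completed(fs)]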
DoneAndNotDoneFutures = collections.namedtuple(
'DoneAndNotDoneFutures', 'done not_done')
def wait(fs, timeout=None, return_when=ALL_COMPLETED):
"""Wait for the futures in the given sequence to complete.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
wait upon.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
return_when: Indicates when this function should return. The options
are:
FIRST_COMPLETED - Return when any future finishes or is
cancelled.
FIRST_EXCEPTION - Return when any future finishes by raising an
exception. If no future raises an exception
then it is equivalent to ALL_COMPLETED.
ALL_COMPLETED - Return when all futures finish or are cancelled.
Returns:
A named 2-tuple of sets. The first set, named 'done', contains the
futures that completed (is finished or cancelled) before the wait
completed. The second set, named 'not_done', contains uncompleted
futures.
"""
with _AcquireFutures(fs):
done = set(f for f in fs
if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
not_done = set(fs) - done
if (return_when == FIRST_COMPLETED) and done:
return DoneAndNotDoneFutures(done, not_done)
elif (return_when == FIRST_EXCEPTION) and done:
if any(f for f in done
if not f.cancelled() and f.exception() is not None):
return DoneAndNotDoneFutures(done, not_done)
if len(done) == len(fs):
return DoneAndNotDoneFutures(done, not_done)
waiter = _create_and_install_waiters(fs, return_when)
waiter.event.wait(timeout)
for f in fs:
with f._condition:
f._waiters.remove(waiter)
done.update(waiter.finished_futures)
return DoneAndNotDoneFutures(done, set(fs) - done)
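# Illustrative sketch (an assumption, not part of the original module):
# stop as soon as any future fails, then cancel whatever has not finished.
def _example_wait_usage(fs):
    done, not_done = wait(fs, timeout=30.0, return_when=FIRST_EXCEPTION)
    for f in not_done:
        f.cancel()
    # exception() does not block on futures that are already done.
    return [f for f in done if not f.cancelled() and f.exception() is None]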
class Future(object):
"""Represents the result of an asynchronous computation."""
def __init__(self):
"""Initializes the future. Should not be called by clients."""
self._condition = threading.Condition()
self._state = PENDING
self._result = None
self._exception = None
self._waiters = []
self._done_callbacks = []
def _invoke_callbacks(self):
for callback in self._done_callbacks:
try:
callback(self)
except Exception:
LOGGER.exception('exception calling callback for %r', self)
def __repr__(self):
with self._condition:
if self._state == FINISHED:
if self._exception:
return '<Future at %s state=%s raised %s>' % (
hex(id(self)),
_STATE_TO_DESCRIPTION_MAP[self._state],
self._exception.__class__.__name__)
else:
return '<Future at %s state=%s returned %s>' % (
hex(id(self)),
_STATE_TO_DESCRIPTION_MAP[self._state],
self._result.__class__.__name__)
return '<Future at %s state=%s>' % (
hex(id(self)),
_STATE_TO_DESCRIPTION_MAP[self._state])
def cancel(self):
"""Cancel the future if possible.
Returns True if the future was cancelled, False otherwise. A future
cannot be cancelled if it is running or has already completed.
"""
with self._condition:
if self._state in [RUNNING, FINISHED]:
return False
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
return True
self._state = CANCELLED
self._condition.notify_all()
self._invoke_callbacks()
return True
def cancelled(self):
"""Return True if the future was cancelled."""
with self._condition:
return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
def running(self):
"""Return True if the future is currently executing."""
with self._condition:
return self._state == RUNNING
def done(self):
"""Return True of the future was cancelled or finished executing."""
with self._condition:
return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]
def __get_result(self):
if self._exception:
raise self._exception
else:
return self._result
def add_done_callback(self, fn):
"""Attaches a callable that will be called when the future finishes.
Args:
fn: A callable that will be called with this future as its only
argument when the future completes or is cancelled. The callable
will always be called by a thread in the same process in which
it was added. If the future has already completed or been
cancelled then the callable will be called immediately. These
callables are called in the order that they were added.
"""
with self._condition:
if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
self._done_callbacks.append(fn)
return
fn(self)
def result(self, timeout=None):
"""Return the result of the call that the future represents.
Args:
timeout: The number of seconds to wait for the result if the future
isn't done. If None, then there is no limit on the wait time.
Returns:
The result of the call that the future represents.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
Exception: If the call raised then that exception will be raised.
"""
with self._condition:
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self.__get_result()
self._condition.wait(timeout)
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self.__get_result()
else:
raise TimeoutError()
def exception(self, timeout=None):
"""Return the exception raised by the call that the future represents.
Args:
timeout: The number of seconds to wait for the exception if the
future isn't done. If None, then there is no limit on the wait
time.
Returns:
The exception raised by the call that the future represents or None
if the call completed without raising.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
"""
with self._condition:
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self._exception
self._condition.wait(timeout)
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self._exception
else:
raise TimeoutError()
# The following methods should only be used by Executors and in tests.
def set_running_or_notify_cancel(self):
"""Mark the future as running or process any cancel notifications.
Should only be used by Executor implementations and unit tests.
If the future has been cancelled (cancel() was called and returned
True) then any threads waiting on the future completing (though calls
to as_completed() or wait()) are notified and False is returned.
If the future was not cancelled then it is put in the running state
(future calls to running() will return True) and True is returned.
This method should be called by Executor implementations before
executing the work associated with this future. If this method returns
False then the work should not be executed.
Returns:
False if the Future was cancelled, True otherwise.
Raises:
RuntimeError: if this method was already called or if set_result()
or set_exception() was called.
"""
with self._condition:
if self._state == CANCELLED:
self._state = CANCELLED_AND_NOTIFIED
for waiter in self._waiters:
waiter.add_cancelled(self)
# self._condition.notify_all() is not necessary because
# self.cancel() triggers a notification.
return False
elif self._state == PENDING:
self._state = RUNNING
return True
else:
LOGGER.critical('Future %s in unexpected state: %s',
id(self),
self._state)
raise RuntimeError('Future in unexpected state')
def set_result(self, result):
"""Sets the return value of work associated with the future.
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
self._result = result
self._state = FINISHED
for waiter in self._waiters:
waiter.add_result(self)
self._condition.notify_all()
self._invoke_callbacks()
def set_exception(self, exception):
"""Sets the result of the future as being the given exception.
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
self._exception = exception
self._state = FINISHED
for waiter in self._waiters:
waiter.add_exception(self)
self._condition.notify_all()
self._invoke_callbacks()
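# Minimal sketch (an assumption, not part of this module) of how an
# executor's worker drives a Future through its life cycle, following the
# protocol documented on set_running_or_notify_cancel().
def _example_run_work_item(future, fn, args, kwargs):
    if not future.set_running_or_notify_cancel():
        return  # cancelled before starting; waiters were already notified
    try:
        result = fn(*args, **kwargs)
    except BaseException as e:
        future.set_exception(e)
    else:
        future.set_result(result)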
class Executor(object):
"""This is an abstract base class for concrete asynchronous executors."""
def submit(self, fn, *args, **kwargs):
"""Submits a callable to be executed with the given arguments.
Schedules the callable to be executed as fn(*args, **kwargs) and returns
a Future instance representing the execution of the callable.
Returns:
A Future representing the given call.
"""
raise NotImplementedError()
def map(self, fn, *iterables, timeout=None):
"""Returns a iterator equivalent to map(fn, iter).
Args:
fn: A callable that will take as many arguments as there are
passed iterables.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
Returns:
An iterator equivalent to: map(func, *iterables) but the calls may
be evaluated out-of-order.
Raises:
TimeoutError: If the entire result iterator could not be generated
before the given timeout.
Exception: If fn(*args) raises for any values.
"""
if timeout is not None:
end_time = timeout + time.time()
fs = [self.submit(fn, *args) for args in zip(*iterables)]
# Yield must be hidden in closure so that the futures are submitted
# before the first iterator value is required.
def result_iterator():
try:
for future in fs:
if timeout is None:
yield future.result()
else:
yield future.result(end_time - time.time())
finally:
for future in fs:
future.cancel()
return result_iterator()
def shutdown(self, wait=True):
"""Clean-up the resources associated with the Executor.
It is safe to call this method several times. Otherwise, no other
methods can be called after this one.
Args:
wait: If True then shutdown will not return until all running
futures have finished executing and the resources used by the
executor have been reclaimed.
"""
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.shutdown(wait=True)
return False
| lgpl-3.0 | -6,628,829,400,186,757,000 | 33.153043 | 80 | 0.587789 | false |
igoralmeida/tahoe-lafs | src/allmydata/test/test_deepcheck.py | 6 | 60303 |
import os, simplejson, urllib
from cStringIO import StringIO
from twisted.trial import unittest
from twisted.internet import defer
from twisted.internet import threads # CLI tests use deferToThread
from allmydata.immutable import upload
from allmydata.mutable.common import UnrecoverableFileError
from allmydata.mutable.publish import MutableData
from allmydata.util import idlib
from allmydata.util import base32
from allmydata.scripts import runner
from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
IDeepCheckResults, IDeepCheckAndRepairResults
from allmydata.monitor import Monitor, OperationCancelledError
from allmydata.uri import LiteralFileURI
from twisted.web.client import getPage
from allmydata.test.common import ErrorMixin, _corrupt_mutable_share_data, \
ShouldFailMixin
from allmydata.test.common_util import StallMixin
from allmydata.test.no_network import GridTestMixin
timeout = 2400 # One of these took 1046.091s on Zandr's ARM box.
class MutableChecker(GridTestMixin, unittest.TestCase, ErrorMixin):
def _run_cli(self, argv):
stdout, stderr = StringIO(), StringIO()
# this can only do synchronous operations
assert argv[0] == "debug"
runner.runner(argv, run_by_human=False, stdout=stdout, stderr=stderr)
return stdout.getvalue()
def test_good(self):
self.basedir = "deepcheck/MutableChecker/good"
self.set_up_grid()
CONTENTS = "a little bit of data"
CONTENTS_uploadable = MutableData(CONTENTS)
d = self.g.clients[0].create_mutable_file(CONTENTS_uploadable)
def _created(node):
self.node = node
self.fileurl = "uri/" + urllib.quote(node.get_uri())
d.addCallback(_created)
# now make sure the webapi verifier sees no problems
d.addCallback(lambda ign: self.GET(self.fileurl+"?t=check&verify=true",
method="POST"))
def _got_results(out):
self.failUnless("<span>Healthy : Healthy</span>" in out, out)
self.failUnless("Recoverable Versions: 10*seq1-" in out, out)
self.failIf("Not Healthy!" in out, out)
self.failIf("Unhealthy" in out, out)
self.failIf("Corrupt Shares" in out, out)
d.addCallback(_got_results)
d.addErrback(self.explain_web_error)
return d
def test_corrupt(self):
self.basedir = "deepcheck/MutableChecker/corrupt"
self.set_up_grid()
CONTENTS = "a little bit of data"
CONTENTS_uploadable = MutableData(CONTENTS)
d = self.g.clients[0].create_mutable_file(CONTENTS_uploadable)
def _stash_and_corrupt(node):
self.node = node
self.fileurl = "uri/" + urllib.quote(node.get_uri())
self.corrupt_shares_numbered(node.get_uri(), [0],
_corrupt_mutable_share_data)
d.addCallback(_stash_and_corrupt)
# now make sure the webapi verifier notices it
d.addCallback(lambda ign: self.GET(self.fileurl+"?t=check&verify=true",
method="POST"))
def _got_results(out):
self.failUnless("Not Healthy!" in out, out)
self.failUnless("Unhealthy: best version has only 9 shares (encoding is 3-of-10)" in out, out)
self.failUnless("Corrupt Shares:" in out, out)
d.addCallback(_got_results)
# now make sure the webapi repairer can fix it
d.addCallback(lambda ign:
self.GET(self.fileurl+"?t=check&verify=true&repair=true",
method="POST"))
def _got_repair_results(out):
self.failUnless("<div>Repair successful</div>" in out, out)
d.addCallback(_got_repair_results)
d.addCallback(lambda ign: self.GET(self.fileurl+"?t=check&verify=true",
method="POST"))
def _got_postrepair_results(out):
self.failIf("Not Healthy!" in out, out)
self.failUnless("Recoverable Versions: 10*seq" in out, out)
d.addCallback(_got_postrepair_results)
d.addErrback(self.explain_web_error)
return d
def test_delete_share(self):
self.basedir = "deepcheck/MutableChecker/delete_share"
self.set_up_grid()
CONTENTS = "a little bit of data"
CONTENTS_uploadable = MutableData(CONTENTS)
d = self.g.clients[0].create_mutable_file(CONTENTS_uploadable)
def _stash_and_delete(node):
self.node = node
self.fileurl = "uri/" + urllib.quote(node.get_uri())
self.delete_shares_numbered(node.get_uri(), [0])
d.addCallback(_stash_and_delete)
# now make sure the webapi checker notices it
d.addCallback(lambda ign: self.GET(self.fileurl+"?t=check&verify=false",
method="POST"))
def _got_results(out):
self.failUnless("Not Healthy!" in out, out)
self.failUnless("Unhealthy: best version has only 9 shares (encoding is 3-of-10)" in out, out)
self.failIf("Corrupt Shares" in out, out)
d.addCallback(_got_results)
# now make sure the webapi repairer can fix it
d.addCallback(lambda ign:
self.GET(self.fileurl+"?t=check&verify=false&repair=true",
method="POST"))
def _got_repair_results(out):
self.failUnless("Repair successful" in out)
d.addCallback(_got_repair_results)
d.addCallback(lambda ign: self.GET(self.fileurl+"?t=check&verify=false",
method="POST"))
def _got_postrepair_results(out):
self.failIf("Not Healthy!" in out, out)
self.failUnless("Recoverable Versions: 10*seq" in out)
d.addCallback(_got_postrepair_results)
d.addErrback(self.explain_web_error)
return d
class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin):
def web_json(self, n, **kwargs):
kwargs["output"] = "json"
d = self.web(n, "POST", **kwargs)
d.addCallback(self.decode_json)
return d
def decode_json(self, (s,url)):
try:
data = simplejson.loads(s)
except ValueError:
self.fail("%s: not JSON: '%s'" % (url, s))
return data
def parse_streamed_json(self, s):
for unit in s.split("\n"):
if not unit:
# stream should end with a newline, so split returns ""
continue
try:
yield simplejson.loads(unit)
except ValueError, le:
le.args = tuple(le.args + (unit,))
raise
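    # Each streamed unit is a single JSON object on its own line; a
    # hypothetical, abbreviated "file" unit looks roughly like:
    #   {"type": "file", "cap": "URI:CHK:...", "verifycap": "...",
    #    "repaircap": "...", "storage-index": "..."}
    # and the stream ends with a {"type": "stats", ...} unit.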
def web(self, n, method="GET", **kwargs):
# returns (data, url)
url = (self.client_baseurls[0] + "uri/%s" % urllib.quote(n.get_uri())
+ "?" + "&".join(["%s=%s" % (k,v) for (k,v) in kwargs.items()]))
d = getPage(url, method=method)
d.addCallback(lambda data: (data,url))
return d
def wait_for_operation(self, ignored, ophandle):
url = self.client_baseurls[0] + "operations/" + ophandle
url += "?t=status&output=JSON"
d = getPage(url)
def _got(res):
try:
data = simplejson.loads(res)
except ValueError:
self.fail("%s: not JSON: '%s'" % (url, res))
if not data["finished"]:
d = self.stall(delay=1.0)
d.addCallback(self.wait_for_operation, ophandle)
return d
return data
d.addCallback(_got)
return d
def get_operation_results(self, ignored, ophandle, output=None):
url = self.client_baseurls[0] + "operations/" + ophandle
url += "?t=status"
if output:
url += "&output=" + output
d = getPage(url)
def _got(res):
if output and output.lower() == "json":
try:
return simplejson.loads(res)
except ValueError:
self.fail("%s: not JSON: '%s'" % (url, res))
return res
d.addCallback(_got)
return d
def slow_web(self, n, output=None, **kwargs):
# use ophandle=
handle = base32.b2a(os.urandom(4))
d = self.web(n, "POST", ophandle=handle, **kwargs)
d.addCallback(self.wait_for_operation, handle)
d.addCallback(self.get_operation_results, handle, output=output)
return d
class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
# construct a small directory tree (with one dir, one immutable file, one
# mutable file, two LIT files, one DIR2:LIT empty dir, one DIR2:LIT tiny
# dir, and a loop), and then check/examine it in various ways.
def set_up_tree(self):
# 2.9s
c0 = self.g.clients[0]
d = c0.create_dirnode()
def _created_root(n):
self.root = n
self.root_uri = n.get_uri()
d.addCallback(_created_root)
d.addCallback(lambda ign:
c0.create_mutable_file(MutableData("mutable file contents")))
d.addCallback(lambda n: self.root.set_node(u"mutable", n))
def _created_mutable(n):
self.mutable = n
self.mutable_uri = n.get_uri()
d.addCallback(_created_mutable)
large = upload.Data("Lots of data\n" * 1000, None)
d.addCallback(lambda ign: self.root.add_file(u"large", large))
def _created_large(n):
self.large = n
self.large_uri = n.get_uri()
d.addCallback(_created_large)
small = upload.Data("Small enough for a LIT", None)
d.addCallback(lambda ign: self.root.add_file(u"small", small))
def _created_small(n):
self.small = n
self.small_uri = n.get_uri()
d.addCallback(_created_small)
small2 = upload.Data("Small enough for a LIT too", None)
d.addCallback(lambda ign: self.root.add_file(u"small2", small2))
def _created_small2(n):
self.small2 = n
self.small2_uri = n.get_uri()
d.addCallback(_created_small2)
empty_litdir_uri = "URI:DIR2-LIT:"
tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
d.addCallback(lambda ign: self.root._create_and_validate_node(None, empty_litdir_uri, name=u"test_deepcheck empty_lit_dir"))
def _created_empty_lit_dir(n):
self.empty_lit_dir = n
self.empty_lit_dir_uri = n.get_uri()
self.root.set_node(u"empty_lit_dir", n)
d.addCallback(_created_empty_lit_dir)
d.addCallback(lambda ign: self.root._create_and_validate_node(None, tiny_litdir_uri, name=u"test_deepcheck tiny_lit_dir"))
def _created_tiny_lit_dir(n):
self.tiny_lit_dir = n
self.tiny_lit_dir_uri = n.get_uri()
self.root.set_node(u"tiny_lit_dir", n)
d.addCallback(_created_tiny_lit_dir)
d.addCallback(lambda ign: self.root.set_node(u"loop", self.root))
return d
def check_is_healthy(self, cr, n, where, incomplete=False):
self.failUnless(ICheckResults.providedBy(cr), where)
self.failUnless(cr.is_healthy(), where)
self.failUnlessEqual(cr.get_storage_index(), n.get_storage_index(),
where)
self.failUnlessEqual(cr.get_storage_index_string(),
base32.b2a(n.get_storage_index()), where)
num_servers = len(self.g.all_servers)
self.failUnlessEqual(num_servers, 10, where)
self.failUnlessEqual(cr.get_happiness(), num_servers, where)
self.failUnlessEqual(cr.get_share_counter_good(), num_servers, where)
self.failUnlessEqual(cr.get_encoding_needed(), 3, where)
self.failUnlessEqual(cr.get_encoding_expected(), num_servers, where)
if not incomplete:
self.failUnlessEqual(cr.get_host_counter_good_shares(),
num_servers, where)
self.failUnlessEqual(cr.get_corrupt_shares(), [], where)
if not incomplete:
self.failUnlessEqual(sorted([s.get_serverid()
for s in cr.get_servers_responding()]),
sorted(self.g.get_all_serverids()),
where)
all_serverids = set()
for (shareid, servers) in cr.get_sharemap().items():
all_serverids.update([s.get_serverid() for s in servers])
self.failUnlessEqual(sorted(all_serverids),
sorted(self.g.get_all_serverids()),
where)
self.failUnlessEqual(cr.get_share_counter_wrong(), 0, where)
self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where)
self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where)
def check_and_repair_is_healthy(self, cr, n, where, incomplete=False):
self.failUnless(ICheckAndRepairResults.providedBy(cr), (where, cr))
self.failUnless(cr.get_pre_repair_results().is_healthy(), where)
self.check_is_healthy(cr.get_pre_repair_results(), n, where, incomplete)
self.failUnless(cr.get_post_repair_results().is_healthy(), where)
self.check_is_healthy(cr.get_post_repair_results(), n, where, incomplete)
self.failIf(cr.get_repair_attempted(), where)
def deep_check_is_healthy(self, cr, num_healthy, where):
self.failUnless(IDeepCheckResults.providedBy(cr))
self.failUnlessEqual(cr.get_counters()["count-objects-healthy"],
num_healthy, where)
def deep_check_and_repair_is_healthy(self, cr, num_healthy, where):
self.failUnless(IDeepCheckAndRepairResults.providedBy(cr), where)
c = cr.get_counters()
self.failUnlessEqual(c["count-objects-healthy-pre-repair"],
num_healthy, where)
self.failUnlessEqual(c["count-objects-healthy-post-repair"],
num_healthy, where)
self.failUnlessEqual(c["count-repairs-attempted"], 0, where)
def test_good(self):
self.basedir = "deepcheck/DeepCheckWebGood/good"
self.set_up_grid()
d = self.set_up_tree()
d.addCallback(self.do_stats)
d.addCallback(self.do_web_stream_manifest)
d.addCallback(self.do_web_stream_check)
d.addCallback(self.do_test_check_good)
d.addCallback(self.do_test_web_good)
d.addCallback(self.do_test_cli_good)
d.addErrback(self.explain_web_error)
d.addErrback(self.explain_error)
return d
def do_stats(self, ignored):
d = defer.succeed(None)
d.addCallback(lambda ign: self.root.start_deep_stats().when_done())
d.addCallback(self.check_stats_good)
return d
def check_stats_good(self, s):
self.failUnlessEqual(s["count-directories"], 3)
self.failUnlessEqual(s["count-files"], 5)
self.failUnlessEqual(s["count-immutable-files"], 1)
self.failUnlessEqual(s["count-literal-files"], 3)
self.failUnlessEqual(s["count-mutable-files"], 1)
# don't check directories: their size will vary
# s["largest-directory"]
# s["size-directories"]
self.failUnlessEqual(s["largest-directory-children"], 7)
self.failUnlessEqual(s["largest-immutable-file"], 13000)
# to re-use this function for both the local
# dirnode.start_deep_stats() and the webapi t=start-deep-stats, we
# coerce the result into a list of tuples. dirnode.start_deep_stats()
# returns a list of tuples, but JSON only knows about lists., so
# t=start-deep-stats returns a list of lists.
histogram = [tuple(stuff) for stuff in s["size-files-histogram"]]
self.failUnlessEqual(histogram, [(4, 10, 1), (11, 31, 2),
(10001, 31622, 1),
])
self.failUnlessEqual(s["size-immutable-files"], 13000)
self.failUnlessEqual(s["size-literal-files"], 56)
def do_web_stream_manifest(self, ignored):
d = self.web(self.root, method="POST", t="stream-manifest")
d.addCallback(lambda (output,url):
self._check_streamed_manifest(output))
return d
def _check_streamed_manifest(self, output):
units = list(self.parse_streamed_json(output))
files = [u for u in units if u["type"] in ("file", "directory")]
assert units[-1]["type"] == "stats"
stats = units[-1]["stats"]
self.failUnlessEqual(len(files), 8)
        # [root,mutable,large] are distributed; [small,small2,empty_litdir,tiny_litdir] and tiny_litdir's LIT child are not (5 in total)
self.failUnlessEqual(len([f for f in files
if f["verifycap"] != ""]), 3)
self.failUnlessEqual(len([f for f in files
if f["verifycap"] == ""]), 5)
self.failUnlessEqual(len([f for f in files
if f["repaircap"] != ""]), 3)
self.failUnlessEqual(len([f for f in files
if f["repaircap"] == ""]), 5)
self.failUnlessEqual(len([f for f in files
if f["storage-index"] != ""]), 3)
self.failUnlessEqual(len([f for f in files
if f["storage-index"] == ""]), 5)
# make sure that a mutable file has filecap==repaircap!=verifycap
mutable = [f for f in files
if f["cap"] is not None
and f["cap"].startswith("URI:SSK:")][0]
self.failUnlessEqual(mutable["cap"], self.mutable_uri)
self.failIfEqual(mutable["cap"], mutable["verifycap"])
self.failUnlessEqual(mutable["cap"], mutable["repaircap"])
# for immutable file, verifycap==repaircap!=filecap
large = [f for f in files
if f["cap"] is not None
and f["cap"].startswith("URI:CHK:")][0]
self.failUnlessEqual(large["cap"], self.large_uri)
self.failIfEqual(large["cap"], large["verifycap"])
self.failUnlessEqual(large["verifycap"], large["repaircap"])
self.check_stats_good(stats)
def do_web_stream_check(self, ignored):
# TODO
return
d = self.web(self.root, t="stream-deep-check")
def _check(res):
units = list(self.parse_streamed_json(res))
#files = [u for u in units if u["type"] in ("file", "directory")]
assert units[-1]["type"] == "stats"
#stats = units[-1]["stats"]
# ...
d.addCallback(_check)
return d
def do_test_check_good(self, ignored):
d = defer.succeed(None)
# check the individual items
d.addCallback(lambda ign: self.root.check(Monitor()))
d.addCallback(self.check_is_healthy, self.root, "root")
d.addCallback(lambda ign: self.mutable.check(Monitor()))
d.addCallback(self.check_is_healthy, self.mutable, "mutable")
d.addCallback(lambda ign: self.large.check(Monitor()))
d.addCallback(self.check_is_healthy, self.large, "large")
d.addCallback(lambda ign: self.small.check(Monitor()))
d.addCallback(self.failUnlessEqual, None, "small")
d.addCallback(lambda ign: self.small2.check(Monitor()))
d.addCallback(self.failUnlessEqual, None, "small2")
d.addCallback(lambda ign: self.empty_lit_dir.check(Monitor()))
d.addCallback(self.failUnlessEqual, None, "empty_lit_dir")
d.addCallback(lambda ign: self.tiny_lit_dir.check(Monitor()))
d.addCallback(self.failUnlessEqual, None, "tiny_lit_dir")
# and again with verify=True
d.addCallback(lambda ign: self.root.check(Monitor(), verify=True))
d.addCallback(self.check_is_healthy, self.root, "root")
d.addCallback(lambda ign: self.mutable.check(Monitor(), verify=True))
d.addCallback(self.check_is_healthy, self.mutable, "mutable")
d.addCallback(lambda ign: self.large.check(Monitor(), verify=True))
d.addCallback(self.check_is_healthy, self.large, "large", incomplete=True)
d.addCallback(lambda ign: self.small.check(Monitor(), verify=True))
d.addCallback(self.failUnlessEqual, None, "small")
d.addCallback(lambda ign: self.small2.check(Monitor(), verify=True))
d.addCallback(self.failUnlessEqual, None, "small2")
d.addCallback(lambda ign: self.empty_lit_dir.check(Monitor(), verify=True))
d.addCallback(self.failUnlessEqual, None, "empty_lit_dir")
d.addCallback(lambda ign: self.tiny_lit_dir.check(Monitor(), verify=True))
d.addCallback(self.failUnlessEqual, None, "tiny_lit_dir")
# and check_and_repair(), which should be a nop
d.addCallback(lambda ign: self.root.check_and_repair(Monitor()))
d.addCallback(self.check_and_repair_is_healthy, self.root, "root")
d.addCallback(lambda ign: self.mutable.check_and_repair(Monitor()))
d.addCallback(self.check_and_repair_is_healthy, self.mutable, "mutable")
d.addCallback(lambda ign: self.large.check_and_repair(Monitor()))
d.addCallback(self.check_and_repair_is_healthy, self.large, "large")
d.addCallback(lambda ign: self.small.check_and_repair(Monitor()))
d.addCallback(self.failUnlessEqual, None, "small")
d.addCallback(lambda ign: self.small2.check_and_repair(Monitor()))
d.addCallback(self.failUnlessEqual, None, "small2")
d.addCallback(lambda ign: self.empty_lit_dir.check_and_repair(Monitor()))
d.addCallback(self.failUnlessEqual, None, "empty_lit_dir")
d.addCallback(lambda ign: self.tiny_lit_dir.check_and_repair(Monitor()))
# check_and_repair(verify=True)
d.addCallback(lambda ign: self.root.check_and_repair(Monitor(), verify=True))
d.addCallback(self.check_and_repair_is_healthy, self.root, "root")
d.addCallback(lambda ign: self.mutable.check_and_repair(Monitor(), verify=True))
d.addCallback(self.check_and_repair_is_healthy, self.mutable, "mutable")
d.addCallback(lambda ign: self.large.check_and_repair(Monitor(), verify=True))
d.addCallback(self.check_and_repair_is_healthy, self.large, "large", incomplete=True)
d.addCallback(lambda ign: self.small.check_and_repair(Monitor(), verify=True))
d.addCallback(self.failUnlessEqual, None, "small")
d.addCallback(lambda ign: self.small2.check_and_repair(Monitor(), verify=True))
d.addCallback(self.failUnlessEqual, None, "small2")
d.addCallback(lambda ign: self.empty_lit_dir.check_and_repair(Monitor(), verify=True))
d.addCallback(self.failUnlessEqual, None, "empty_lit_dir")
d.addCallback(lambda ign: self.tiny_lit_dir.check_and_repair(Monitor(), verify=True))
# now deep-check the root, with various verify= and repair= options
d.addCallback(lambda ign:
self.root.start_deep_check().when_done())
d.addCallback(self.deep_check_is_healthy, 3, "root")
d.addCallback(lambda ign:
self.root.start_deep_check(verify=True).when_done())
d.addCallback(self.deep_check_is_healthy, 3, "root")
d.addCallback(lambda ign:
self.root.start_deep_check_and_repair().when_done())
d.addCallback(self.deep_check_and_repair_is_healthy, 3, "root")
d.addCallback(lambda ign:
self.root.start_deep_check_and_repair(verify=True).when_done())
d.addCallback(self.deep_check_and_repair_is_healthy, 3, "root")
# and finally, start a deep-check, but then cancel it.
d.addCallback(lambda ign: self.root.start_deep_check())
def _checking(monitor):
monitor.cancel()
d = monitor.when_done()
# this should fire as soon as the next dirnode.list finishes.
# TODO: add a counter to measure how many list() calls are made,
# assert that no more than one gets to run before the cancel()
# takes effect.
def _finished_normally(res):
self.fail("this was supposed to fail, not finish normally")
def _cancelled(f):
f.trap(OperationCancelledError)
d.addCallbacks(_finished_normally, _cancelled)
return d
d.addCallback(_checking)
return d
def json_check_is_healthy(self, data, n, where, incomplete=False):
self.failUnlessEqual(data["storage-index"],
base32.b2a(n.get_storage_index()), where)
self.failUnless("summary" in data, (where, data))
self.failUnlessEqual(data["summary"].lower(), "healthy",
"%s: '%s'" % (where, data["summary"]))
r = data["results"]
self.failUnlessEqual(r["healthy"], True, where)
num_servers = len(self.g.all_servers)
self.failUnlessEqual(num_servers, 10)
self.failIfIn("needs-rebalancing", r)
self.failUnlessEqual(r["count-happiness"], num_servers, where)
self.failUnlessEqual(r["count-shares-good"], num_servers, where)
self.failUnlessEqual(r["count-shares-needed"], 3, where)
self.failUnlessEqual(r["count-shares-expected"], num_servers, where)
if not incomplete:
self.failUnlessEqual(r["count-good-share-hosts"], num_servers,
where)
self.failUnlessEqual(r["count-corrupt-shares"], 0, where)
self.failUnlessEqual(r["list-corrupt-shares"], [], where)
if not incomplete:
self.failUnlessEqual(sorted(r["servers-responding"]),
sorted([idlib.nodeid_b2a(sid)
for sid in self.g.get_all_serverids()]),
where)
self.failUnless("sharemap" in r, where)
all_serverids = set()
for (shareid, serverids_s) in r["sharemap"].items():
all_serverids.update(serverids_s)
self.failUnlessEqual(sorted(all_serverids),
sorted([idlib.nodeid_b2a(sid)
for sid in self.g.get_all_serverids()]),
where)
self.failUnlessEqual(r["count-wrong-shares"], 0, where)
self.failUnlessEqual(r["count-recoverable-versions"], 1, where)
self.failUnlessEqual(r["count-unrecoverable-versions"], 0, where)
def json_check_and_repair_is_healthy(self, data, n, where, incomplete=False):
self.failUnlessEqual(data["storage-index"],
base32.b2a(n.get_storage_index()), where)
self.failUnlessEqual(data["repair-attempted"], False, where)
self.json_check_is_healthy(data["pre-repair-results"],
n, where, incomplete)
self.json_check_is_healthy(data["post-repair-results"],
n, where, incomplete)
def json_full_deepcheck_is_healthy(self, data, n, where):
self.failUnlessEqual(data["root-storage-index"],
base32.b2a(n.get_storage_index()), where)
self.failUnlessEqual(data["count-objects-checked"], 3, where)
self.failUnlessEqual(data["count-objects-healthy"], 3, where)
self.failUnlessEqual(data["count-objects-unhealthy"], 0, where)
self.failUnlessEqual(data["count-corrupt-shares"], 0, where)
self.failUnlessEqual(data["list-corrupt-shares"], [], where)
self.failUnlessEqual(data["list-unhealthy-files"], [], where)
self.json_check_stats_good(data["stats"], where)
def json_full_deepcheck_and_repair_is_healthy(self, data, n, where):
self.failUnlessEqual(data["root-storage-index"],
base32.b2a(n.get_storage_index()), where)
self.failUnlessEqual(data["count-objects-checked"], 3, where)
self.failUnlessEqual(data["count-objects-healthy-pre-repair"], 3, where)
self.failUnlessEqual(data["count-objects-unhealthy-pre-repair"], 0, where)
self.failUnlessEqual(data["count-corrupt-shares-pre-repair"], 0, where)
self.failUnlessEqual(data["count-objects-healthy-post-repair"], 3, where)
self.failUnlessEqual(data["count-objects-unhealthy-post-repair"], 0, where)
self.failUnlessEqual(data["count-corrupt-shares-post-repair"], 0, where)
self.failUnlessEqual(data["list-corrupt-shares"], [], where)
self.failUnlessEqual(data["list-remaining-corrupt-shares"], [], where)
self.failUnlessEqual(data["list-unhealthy-files"], [], where)
self.failUnlessEqual(data["count-repairs-attempted"], 0, where)
self.failUnlessEqual(data["count-repairs-successful"], 0, where)
self.failUnlessEqual(data["count-repairs-unsuccessful"], 0, where)
def json_check_lit(self, data, n, where):
self.failUnlessEqual(data["storage-index"], "", where)
self.failUnlessEqual(data["results"]["healthy"], True, where)
def json_check_stats_good(self, data, where):
self.check_stats_good(data)
def do_test_web_good(self, ignored):
d = defer.succeed(None)
# stats
d.addCallback(lambda ign:
self.slow_web(self.root,
t="start-deep-stats", output="json"))
d.addCallback(self.json_check_stats_good, "deep-stats")
# check, no verify
d.addCallback(lambda ign: self.web_json(self.root, t="check"))
d.addCallback(self.json_check_is_healthy, self.root, "root")
d.addCallback(lambda ign: self.web_json(self.mutable, t="check"))
d.addCallback(self.json_check_is_healthy, self.mutable, "mutable")
d.addCallback(lambda ign: self.web_json(self.large, t="check"))
d.addCallback(self.json_check_is_healthy, self.large, "large")
d.addCallback(lambda ign: self.web_json(self.small, t="check"))
d.addCallback(self.json_check_lit, self.small, "small")
d.addCallback(lambda ign: self.web_json(self.small2, t="check"))
d.addCallback(self.json_check_lit, self.small2, "small2")
d.addCallback(lambda ign: self.web_json(self.empty_lit_dir, t="check"))
d.addCallback(self.json_check_lit, self.empty_lit_dir, "empty_lit_dir")
d.addCallback(lambda ign: self.web_json(self.tiny_lit_dir, t="check"))
d.addCallback(self.json_check_lit, self.tiny_lit_dir, "tiny_lit_dir")
# check and verify
d.addCallback(lambda ign:
self.web_json(self.root, t="check", verify="true"))
d.addCallback(self.json_check_is_healthy, self.root, "root+v")
d.addCallback(lambda ign:
self.web_json(self.mutable, t="check", verify="true"))
d.addCallback(self.json_check_is_healthy, self.mutable, "mutable+v")
d.addCallback(lambda ign:
self.web_json(self.large, t="check", verify="true"))
d.addCallback(self.json_check_is_healthy, self.large, "large+v",
incomplete=True)
d.addCallback(lambda ign:
self.web_json(self.small, t="check", verify="true"))
d.addCallback(self.json_check_lit, self.small, "small+v")
d.addCallback(lambda ign:
self.web_json(self.small2, t="check", verify="true"))
d.addCallback(self.json_check_lit, self.small2, "small2+v")
d.addCallback(lambda ign: self.web_json(self.empty_lit_dir, t="check", verify="true"))
d.addCallback(self.json_check_lit, self.empty_lit_dir, "empty_lit_dir+v")
d.addCallback(lambda ign: self.web_json(self.tiny_lit_dir, t="check", verify="true"))
d.addCallback(self.json_check_lit, self.tiny_lit_dir, "tiny_lit_dir+v")
# check and repair, no verify
d.addCallback(lambda ign:
self.web_json(self.root, t="check", repair="true"))
d.addCallback(self.json_check_and_repair_is_healthy, self.root, "root+r")
d.addCallback(lambda ign:
self.web_json(self.mutable, t="check", repair="true"))
d.addCallback(self.json_check_and_repair_is_healthy, self.mutable, "mutable+r")
d.addCallback(lambda ign:
self.web_json(self.large, t="check", repair="true"))
d.addCallback(self.json_check_and_repair_is_healthy, self.large, "large+r")
d.addCallback(lambda ign:
self.web_json(self.small, t="check", repair="true"))
d.addCallback(self.json_check_lit, self.small, "small+r")
d.addCallback(lambda ign:
self.web_json(self.small2, t="check", repair="true"))
d.addCallback(self.json_check_lit, self.small2, "small2+r")
d.addCallback(lambda ign: self.web_json(self.empty_lit_dir, t="check", repair="true"))
d.addCallback(self.json_check_lit, self.empty_lit_dir, "empty_lit_dir+r")
d.addCallback(lambda ign: self.web_json(self.tiny_lit_dir, t="check", repair="true"))
d.addCallback(self.json_check_lit, self.tiny_lit_dir, "tiny_lit_dir+r")
# check+verify+repair
d.addCallback(lambda ign:
self.web_json(self.root, t="check", repair="true", verify="true"))
d.addCallback(self.json_check_and_repair_is_healthy, self.root, "root+vr")
d.addCallback(lambda ign:
self.web_json(self.mutable, t="check", repair="true", verify="true"))
d.addCallback(self.json_check_and_repair_is_healthy, self.mutable, "mutable+vr")
d.addCallback(lambda ign:
self.web_json(self.large, t="check", repair="true", verify="true"))
d.addCallback(self.json_check_and_repair_is_healthy, self.large, "large+vr", incomplete=True)
d.addCallback(lambda ign:
self.web_json(self.small, t="check", repair="true", verify="true"))
d.addCallback(self.json_check_lit, self.small, "small+vr")
d.addCallback(lambda ign:
self.web_json(self.small2, t="check", repair="true", verify="true"))
d.addCallback(self.json_check_lit, self.small2, "small2+vr")
        d.addCallback(lambda ign: self.web_json(self.empty_lit_dir, t="check", repair="true", verify="true"))
        d.addCallback(self.json_check_lit, self.empty_lit_dir, "empty_lit_dir+vr")
        d.addCallback(lambda ign: self.web_json(self.tiny_lit_dir, t="check", repair="true", verify="true"))
d.addCallback(self.json_check_lit, self.tiny_lit_dir, "tiny_lit_dir+vr")
# now run a deep-check, with various verify= and repair= flags
d.addCallback(lambda ign:
self.slow_web(self.root, t="start-deep-check", output="json"))
d.addCallback(self.json_full_deepcheck_is_healthy, self.root, "root+d")
d.addCallback(lambda ign:
self.slow_web(self.root, t="start-deep-check", verify="true",
output="json"))
d.addCallback(self.json_full_deepcheck_is_healthy, self.root, "root+dv")
d.addCallback(lambda ign:
self.slow_web(self.root, t="start-deep-check", repair="true",
output="json"))
d.addCallback(self.json_full_deepcheck_and_repair_is_healthy, self.root, "root+dr")
d.addCallback(lambda ign:
self.slow_web(self.root, t="start-deep-check", verify="true", repair="true", output="json"))
d.addCallback(self.json_full_deepcheck_and_repair_is_healthy, self.root, "root+dvr")
# now look at t=info
d.addCallback(lambda ign: self.web(self.root, t="info"))
# TODO: examine the output
d.addCallback(lambda ign: self.web(self.mutable, t="info"))
d.addCallback(lambda ign: self.web(self.large, t="info"))
d.addCallback(lambda ign: self.web(self.small, t="info"))
d.addCallback(lambda ign: self.web(self.small2, t="info"))
d.addCallback(lambda ign: self.web(self.empty_lit_dir, t="info"))
d.addCallback(lambda ign: self.web(self.tiny_lit_dir, t="info"))
return d
def _run_cli(self, argv, stdin=""):
#print "CLI:", argv
stdout, stderr = StringIO(), StringIO()
d = threads.deferToThread(runner.runner, argv, run_by_human=False,
stdin=StringIO(stdin),
stdout=stdout, stderr=stderr)
def _done(res):
return stdout.getvalue(), stderr.getvalue()
d.addCallback(_done)
return d
def do_test_cli_good(self, ignored):
d = defer.succeed(None)
d.addCallback(lambda ign: self.do_cli_manifest_stream1())
d.addCallback(lambda ign: self.do_cli_manifest_stream2())
d.addCallback(lambda ign: self.do_cli_manifest_stream3())
d.addCallback(lambda ign: self.do_cli_manifest_stream4())
d.addCallback(lambda ign: self.do_cli_manifest_stream5())
d.addCallback(lambda ign: self.do_cli_stats1())
d.addCallback(lambda ign: self.do_cli_stats2())
return d
def _check_manifest_storage_index(self, out):
lines = [l for l in out.split("\n") if l]
self.failUnlessEqual(len(lines), 3)
self.failUnless(base32.b2a(self.root.get_storage_index()) in lines)
self.failUnless(base32.b2a(self.mutable.get_storage_index()) in lines)
self.failUnless(base32.b2a(self.large.get_storage_index()) in lines)
def do_cli_manifest_stream1(self):
basedir = self.get_clientdir(0)
d = self._run_cli(["--node-directory", basedir,
"manifest",
self.root_uri])
def _check((out,err)):
self.failUnlessEqual(err, "")
lines = [l for l in out.split("\n") if l]
self.failUnlessEqual(len(lines), 8)
caps = {}
for l in lines:
try:
cap, path = l.split(None, 1)
except ValueError:
cap = l.strip()
path = ""
caps[cap] = path
self.failUnless(self.root.get_uri() in caps)
self.failUnlessEqual(caps[self.root.get_uri()], "")
self.failUnlessEqual(caps[self.mutable.get_uri()], "mutable")
self.failUnlessEqual(caps[self.large.get_uri()], "large")
self.failUnlessEqual(caps[self.small.get_uri()], "small")
self.failUnlessEqual(caps[self.small2.get_uri()], "small2")
self.failUnlessEqual(caps[self.empty_lit_dir.get_uri()], "empty_lit_dir")
self.failUnlessEqual(caps[self.tiny_lit_dir.get_uri()], "tiny_lit_dir")
d.addCallback(_check)
return d
def do_cli_manifest_stream2(self):
basedir = self.get_clientdir(0)
d = self._run_cli(["--node-directory", basedir,
"manifest",
"--raw",
self.root_uri])
def _check((out,err)):
self.failUnlessEqual(err, "")
# this should be the same as the POST t=stream-manifest output
self._check_streamed_manifest(out)
d.addCallback(_check)
return d
def do_cli_manifest_stream3(self):
basedir = self.get_clientdir(0)
d = self._run_cli(["--node-directory", basedir,
"manifest",
"--storage-index",
self.root_uri])
def _check((out,err)):
self.failUnlessEqual(err, "")
self._check_manifest_storage_index(out)
d.addCallback(_check)
return d
def do_cli_manifest_stream4(self):
basedir = self.get_clientdir(0)
d = self._run_cli(["--node-directory", basedir,
"manifest",
"--verify-cap",
self.root_uri])
def _check((out,err)):
self.failUnlessEqual(err, "")
lines = [l for l in out.split("\n") if l]
self.failUnlessEqual(len(lines), 3)
self.failUnless(self.root.get_verify_cap().to_string() in lines)
self.failUnless(self.mutable.get_verify_cap().to_string() in lines)
self.failUnless(self.large.get_verify_cap().to_string() in lines)
d.addCallback(_check)
return d
def do_cli_manifest_stream5(self):
basedir = self.get_clientdir(0)
d = self._run_cli(["--node-directory", basedir,
"manifest",
"--repair-cap",
self.root_uri])
def _check((out,err)):
self.failUnlessEqual(err, "")
lines = [l for l in out.split("\n") if l]
self.failUnlessEqual(len(lines), 3)
self.failUnless(self.root.get_repair_cap().to_string() in lines)
self.failUnless(self.mutable.get_repair_cap().to_string() in lines)
self.failUnless(self.large.get_repair_cap().to_string() in lines)
d.addCallback(_check)
return d
def do_cli_stats1(self):
basedir = self.get_clientdir(0)
d = self._run_cli(["--node-directory", basedir,
"stats",
self.root_uri])
def _check3((out,err)):
lines = [l.strip() for l in out.split("\n") if l]
self.failUnless("count-immutable-files: 1" in lines)
self.failUnless("count-mutable-files: 1" in lines)
self.failUnless("count-literal-files: 3" in lines)
self.failUnless("count-files: 5" in lines)
self.failUnless("count-directories: 3" in lines)
self.failUnless("size-immutable-files: 13000 (13.00 kB, 12.70 kiB)" in lines, lines)
self.failUnless("size-literal-files: 56" in lines, lines)
self.failUnless(" 4-10 : 1 (10 B, 10 B)".strip() in lines, lines)
self.failUnless(" 11-31 : 2 (31 B, 31 B)".strip() in lines, lines)
self.failUnless("10001-31622 : 1 (31.62 kB, 30.88 kiB)".strip() in lines, lines)
d.addCallback(_check3)
return d
def do_cli_stats2(self):
basedir = self.get_clientdir(0)
d = self._run_cli(["--node-directory", basedir,
"stats",
"--raw",
self.root_uri])
def _check4((out,err)):
data = simplejson.loads(out)
self.failUnlessEqual(data["count-immutable-files"], 1)
self.failUnlessEqual(data["count-immutable-files"], 1)
self.failUnlessEqual(data["count-mutable-files"], 1)
self.failUnlessEqual(data["count-literal-files"], 3)
self.failUnlessEqual(data["count-files"], 5)
self.failUnlessEqual(data["count-directories"], 3)
self.failUnlessEqual(data["size-immutable-files"], 13000)
self.failUnlessEqual(data["size-literal-files"], 56)
self.failUnless([4,10,1] in data["size-files-histogram"])
self.failUnless([11,31,2] in data["size-files-histogram"])
self.failUnless([10001,31622,1] in data["size-files-histogram"])
d.addCallback(_check4)
return d
class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
def test_bad(self):
self.basedir = "deepcheck/DeepCheckWebBad/bad"
self.set_up_grid()
d = self.set_up_damaged_tree()
d.addCallback(self.do_check)
d.addCallback(self.do_deepcheck)
d.addCallback(self.do_deepcheck_broken)
d.addCallback(self.do_test_web_bad)
d.addErrback(self.explain_web_error)
d.addErrback(self.explain_error)
return d
def set_up_damaged_tree(self):
# 6.4s
# root
# mutable-good
# mutable-missing-shares
# mutable-corrupt-shares
# mutable-unrecoverable
# large-good
# large-missing-shares
# large-corrupt-shares
# large-unrecoverable
# broken
        #     large1
# subdir-good
# large2-good
# subdir-unrecoverable
# large3-good
self.nodes = {}
c0 = self.g.clients[0]
d = c0.create_dirnode()
def _created_root(n):
self.root = n
self.root_uri = n.get_uri()
d.addCallback(_created_root)
d.addCallback(self.create_mangled, "mutable-good")
d.addCallback(self.create_mangled, "mutable-missing-shares")
d.addCallback(self.create_mangled, "mutable-corrupt-shares")
d.addCallback(self.create_mangled, "mutable-unrecoverable")
d.addCallback(self.create_mangled, "large-good")
d.addCallback(self.create_mangled, "large-missing-shares")
d.addCallback(self.create_mangled, "large-corrupt-shares")
d.addCallback(self.create_mangled, "large-unrecoverable")
d.addCallback(lambda ignored: c0.create_dirnode())
d.addCallback(self._stash_node, "broken")
large1 = upload.Data("Lots of data\n" * 1000 + "large1" + "\n", None)
d.addCallback(lambda ignored:
self.nodes["broken"].add_file(u"large1", large1))
d.addCallback(lambda ignored:
self.nodes["broken"].create_subdirectory(u"subdir-good"))
large2 = upload.Data("Lots of data\n" * 1000 + "large2" + "\n", None)
d.addCallback(lambda subdir: subdir.add_file(u"large2-good", large2))
d.addCallback(lambda ignored:
self.nodes["broken"].create_subdirectory(u"subdir-unrecoverable"))
d.addCallback(self._stash_node, "subdir-unrecoverable")
large3 = upload.Data("Lots of data\n" * 1000 + "large3" + "\n", None)
d.addCallback(lambda subdir: subdir.add_file(u"large3-good", large3))
d.addCallback(lambda ignored:
self._delete_most_shares(self.nodes["broken"]))
return d
def _stash_node(self, node, name):
self.nodes[name] = node
return node
def create_mangled(self, ignored, name):
nodetype, mangletype = name.split("-", 1)
if nodetype == "mutable":
mutable_uploadable = MutableData("mutable file contents")
d = self.g.clients[0].create_mutable_file(mutable_uploadable)
d.addCallback(lambda n: self.root.set_node(unicode(name), n))
elif nodetype == "large":
large = upload.Data("Lots of data\n" * 1000 + name + "\n", None)
d = self.root.add_file(unicode(name), large)
elif nodetype == "small":
small = upload.Data("Small enough for a LIT", None)
d = self.root.add_file(unicode(name), small)
d.addCallback(self._stash_node, name)
if mangletype == "good":
pass
elif mangletype == "missing-shares":
d.addCallback(self._delete_some_shares)
elif mangletype == "corrupt-shares":
d.addCallback(self._corrupt_some_shares)
else:
assert mangletype == "unrecoverable"
d.addCallback(self._delete_most_shares)
return d
def _run_cli(self, argv):
stdout, stderr = StringIO(), StringIO()
# this can only do synchronous operations
assert argv[0] == "debug"
runner.runner(argv, run_by_human=False, stdout=stdout, stderr=stderr)
return stdout.getvalue()
def _delete_some_shares(self, node):
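        # dropping only shares 0 and 1 leaves the file unhealthy but
        # still recoverable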
self.delete_shares_numbered(node.get_uri(), [0,1])
def _corrupt_some_shares(self, node):
for (shnum, serverid, sharefile) in self.find_uri_shares(node.get_uri()):
if shnum in (0,1):
self._run_cli(["debug", "corrupt-share", sharefile])
def _delete_most_shares(self, node):
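        # deleting shares 1-9 leaves only share 0, fewer than the
        # encoding's shares-needed threshold, so the file becomes
        # unrecoverable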
self.delete_shares_numbered(node.get_uri(), range(1,10))
def check_is_healthy(self, cr, where):
try:
self.failUnless(ICheckResults.providedBy(cr), (cr, type(cr), where))
self.failUnless(cr.is_healthy(), (cr.get_report(), cr.is_healthy(), cr.get_summary(), where))
self.failUnless(cr.is_recoverable(), where)
self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where)
self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where)
return cr
except Exception, le:
le.args = tuple(le.args + (where,))
raise
def check_is_missing_shares(self, cr, where):
self.failUnless(ICheckResults.providedBy(cr), where)
self.failIf(cr.is_healthy(), where)
self.failUnless(cr.is_recoverable(), where)
self.failUnlessEqual(cr.get_version_counter_recoverable(), 1, where)
self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 0, where)
return cr
def check_has_corrupt_shares(self, cr, where):
# by "corrupt-shares" we mean the file is still recoverable
self.failUnless(ICheckResults.providedBy(cr), where)
self.failIf(cr.is_healthy(), (where, cr))
self.failUnless(cr.is_recoverable(), where)
self.failUnless(cr.get_share_counter_good() < 10, where)
self.failUnless(cr.get_corrupt_shares(), where)
return cr
def check_is_unrecoverable(self, cr, where):
self.failUnless(ICheckResults.providedBy(cr), where)
self.failIf(cr.is_healthy(), where)
self.failIf(cr.is_recoverable(), where)
self.failUnless(cr.get_share_counter_good() < cr.get_encoding_needed(),
(cr.get_share_counter_good(), cr.get_encoding_needed(),
where))
self.failUnlessEqual(cr.get_version_counter_recoverable(), 0, where)
self.failUnlessEqual(cr.get_version_counter_unrecoverable(), 1, where)
return cr
def do_check(self, ignored):
d = defer.succeed(None)
# check the individual items, without verification. This will not
# detect corrupt shares.
def _check(which, checker):
d = self.nodes[which].check(Monitor())
d.addCallback(checker, which + "--check")
return d
d.addCallback(lambda ign: _check("mutable-good", self.check_is_healthy))
d.addCallback(lambda ign: _check("mutable-missing-shares",
self.check_is_missing_shares))
d.addCallback(lambda ign: _check("mutable-corrupt-shares",
self.check_is_healthy))
d.addCallback(lambda ign: _check("mutable-unrecoverable",
self.check_is_unrecoverable))
d.addCallback(lambda ign: _check("large-good", self.check_is_healthy))
d.addCallback(lambda ign: _check("large-missing-shares",
self.check_is_missing_shares))
d.addCallback(lambda ign: _check("large-corrupt-shares",
self.check_is_healthy))
d.addCallback(lambda ign: _check("large-unrecoverable",
self.check_is_unrecoverable))
# and again with verify=True, which *does* detect corrupt shares.
def _checkv(which, checker):
d = self.nodes[which].check(Monitor(), verify=True)
d.addCallback(checker, which + "--check-and-verify")
return d
d.addCallback(lambda ign: _checkv("mutable-good", self.check_is_healthy))
d.addCallback(lambda ign: _checkv("mutable-missing-shares",
self.check_is_missing_shares))
d.addCallback(lambda ign: _checkv("mutable-corrupt-shares",
self.check_has_corrupt_shares))
d.addCallback(lambda ign: _checkv("mutable-unrecoverable",
self.check_is_unrecoverable))
d.addCallback(lambda ign: _checkv("large-good", self.check_is_healthy))
d.addCallback(lambda ign: _checkv("large-missing-shares", self.check_is_missing_shares))
d.addCallback(lambda ign: _checkv("large-corrupt-shares", self.check_has_corrupt_shares))
d.addCallback(lambda ign: _checkv("large-unrecoverable",
self.check_is_unrecoverable))
return d
def do_deepcheck(self, ignored):
d = defer.succeed(None)
# now deep-check the root, with various verify= and repair= options
d.addCallback(lambda ign:
self.root.start_deep_check().when_done())
def _check1(cr):
self.failUnless(IDeepCheckResults.providedBy(cr))
c = cr.get_counters()
self.failUnlessEqual(c["count-objects-checked"], 9)
self.failUnlessEqual(c["count-objects-healthy"], 5)
self.failUnlessEqual(c["count-objects-unhealthy"], 4)
self.failUnlessEqual(c["count-objects-unrecoverable"], 2)
d.addCallback(_check1)
d.addCallback(lambda ign:
self.root.start_deep_check(verify=True).when_done())
def _check2(cr):
self.failUnless(IDeepCheckResults.providedBy(cr))
c = cr.get_counters()
self.failUnlessEqual(c["count-objects-checked"], 9)
self.failUnlessEqual(c["count-objects-healthy"], 3)
self.failUnlessEqual(c["count-objects-unhealthy"], 6)
self.failUnlessEqual(c["count-objects-healthy"], 3) # root, mutable good, large good
self.failUnlessEqual(c["count-objects-unrecoverable"], 2) # mutable unrecoverable, large unrecoverable
d.addCallback(_check2)
return d
def do_deepcheck_broken(self, ignored):
# deep-check on the broken directory should fail, because of the
# untraversable subdir
def _do_deep_check():
return self.nodes["broken"].start_deep_check().when_done()
d = self.shouldFail(UnrecoverableFileError, "do_deep_check",
"no recoverable versions",
_do_deep_check)
return d
def json_is_healthy(self, data, where):
r = data["results"]
self.failUnless(r["healthy"], where)
self.failUnless(r["recoverable"], where)
self.failUnlessEqual(r["count-recoverable-versions"], 1, where)
self.failUnlessEqual(r["count-unrecoverable-versions"], 0, where)
def json_is_missing_shares(self, data, where):
r = data["results"]
self.failIf(r["healthy"], where)
self.failUnless(r["recoverable"], where)
self.failUnlessEqual(r["count-recoverable-versions"], 1, where)
self.failUnlessEqual(r["count-unrecoverable-versions"], 0, where)
def json_has_corrupt_shares(self, data, where):
# by "corrupt-shares" we mean the file is still recoverable
r = data["results"]
self.failIf(r["healthy"], where)
self.failUnless(r["recoverable"], where)
self.failUnless(r["count-shares-good"] < 10, where)
self.failUnless(r["count-corrupt-shares"], where)
self.failUnless(r["list-corrupt-shares"], where)
def json_is_unrecoverable(self, data, where):
r = data["results"]
self.failIf(r["healthy"], where)
self.failIf(r["recoverable"], where)
self.failUnless(r["count-shares-good"] < r["count-shares-needed"],
where)
self.failUnlessEqual(r["count-recoverable-versions"], 0, where)
self.failUnlessEqual(r["count-unrecoverable-versions"], 1, where)
def do_test_web_bad(self, ignored):
d = defer.succeed(None)
# check, no verify
def _check(which, checker):
d = self.web_json(self.nodes[which], t="check")
d.addCallback(checker, which + "--webcheck")
return d
d.addCallback(lambda ign: _check("mutable-good",
self.json_is_healthy))
d.addCallback(lambda ign: _check("mutable-missing-shares",
self.json_is_missing_shares))
d.addCallback(lambda ign: _check("mutable-corrupt-shares",
self.json_is_healthy))
d.addCallback(lambda ign: _check("mutable-unrecoverable",
self.json_is_unrecoverable))
d.addCallback(lambda ign: _check("large-good",
self.json_is_healthy))
d.addCallback(lambda ign: _check("large-missing-shares",
self.json_is_missing_shares))
d.addCallback(lambda ign: _check("large-corrupt-shares",
self.json_is_healthy))
d.addCallback(lambda ign: _check("large-unrecoverable",
self.json_is_unrecoverable))
# check and verify
def _checkv(which, checker):
d = self.web_json(self.nodes[which], t="check", verify="true")
d.addCallback(checker, which + "--webcheck-and-verify")
return d
d.addCallback(lambda ign: _checkv("mutable-good",
self.json_is_healthy))
d.addCallback(lambda ign: _checkv("mutable-missing-shares",
self.json_is_missing_shares))
d.addCallback(lambda ign: _checkv("mutable-corrupt-shares",
self.json_has_corrupt_shares))
d.addCallback(lambda ign: _checkv("mutable-unrecoverable",
self.json_is_unrecoverable))
d.addCallback(lambda ign: _checkv("large-good",
self.json_is_healthy))
d.addCallback(lambda ign: _checkv("large-missing-shares", self.json_is_missing_shares))
d.addCallback(lambda ign: _checkv("large-corrupt-shares", self.json_has_corrupt_shares))
d.addCallback(lambda ign: _checkv("large-unrecoverable",
self.json_is_unrecoverable))
return d
class Large(DeepCheckBase, unittest.TestCase):
def test_lots_of_lits(self):
self.basedir = "deepcheck/Large/lots_of_lits"
self.set_up_grid()
# create the following directory structure:
# root/
# subdir/
        #       0000-large (CHK)
# 001-small (LIT)
# 002-small
# ...
# 399-small
# then do a deepcheck and make sure it doesn't cause a
# Deferred-tail-recursion stack overflow
COUNT = 400
c0 = self.g.clients[0]
d = c0.create_dirnode()
self.stash = {}
def _created_root(n):
self.root = n
return n
d.addCallback(_created_root)
d.addCallback(lambda root: root.create_subdirectory(u"subdir"))
def _add_children(subdir_node):
self.subdir_node = subdir_node
kids = {}
for i in range(1, COUNT):
litcap = LiteralFileURI("%03d-data" % i).to_string()
kids[u"%03d-small" % i] = (litcap, litcap)
return subdir_node.set_children(kids)
d.addCallback(_add_children)
up = upload.Data("large enough for CHK" * 100, "")
d.addCallback(lambda ign: self.subdir_node.add_file(u"0000-large", up))
def _start_deepcheck(ignored):
return self.web(self.root, method="POST", t="stream-deep-check")
d.addCallback(_start_deepcheck)
def _check( (output, url) ):
units = list(self.parse_streamed_json(output))
self.failUnlessEqual(len(units), 2+COUNT+1)
d.addCallback(_check)
return d
| gpl-2.0 | -2,333,116,309,238,673,000 | 47.088517 | 159 | 0.589506 | false |
z-jason/anki | thirdparty/BeautifulSoup.py | 20 | 79554 | """Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/
Beautiful Soup parses a (possibly invalid) XML or HTML document into a
tree representation. It provides methods and Pythonic idioms that make
it easy to navigate, search, and modify the tree.
A well-formed XML/HTML document yields a well-formed data
structure. An ill-formed XML/HTML document yields a correspondingly
ill-formed data structure. If your document is only locally
well-formed, you can use this library to find and process the
well-formed part of it.
Beautiful Soup works with Python 2.2 and up. It has no external
dependencies, but you'll have more success at converting data to UTF-8
if you also install these three packages:
* chardet, for auto-detecting character encodings
http://chardet.feedparser.org/
* cjkcodecs and iconv_codec, which add more encodings to the ones supported
by stock Python.
http://cjkpython.i18n.org/
Beautiful Soup defines classes for two main parsing strategies:
* BeautifulStoneSoup, for parsing XML, SGML, or your domain-specific
language that kind of looks like XML.
* BeautifulSoup, for parsing run-of-the-mill HTML code, be it valid
or invalid. This class has web browser-like heuristics for
obtaining a sensible parse tree in the face of common HTML errors.
Beautiful Soup also defines a class (UnicodeDammit) for autodetecting
the encoding of an HTML or XML document, and converting it to
Unicode. Much of this code is taken from Mark Pilgrim's Universal Feed Parser.
For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/documentation.html
Here, have some legalese:
Copyright (c) 2004-2010, Leonard Richardson
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the the Beautiful Soup Consortium and All
Night Kosher Bakery nor the names of its contributors may be
used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT.
"""
from __future__ import generators
__author__ = "Leonard Richardson ([email protected])"
__version__ = "3.2.1"
__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson"
__license__ = "New-style BSD"
from sgmllib import SGMLParser, SGMLParseError
import codecs
import markupbase
import re
import sgmllib
try:
from htmlentitydefs import name2codepoint
except ImportError:
name2codepoint = {}
try:
set
except NameError:
from sets import Set as set
#These hacks make Beautiful Soup able to parse XML with namespaces
sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*')
markupbase._declname_match = re.compile(r'[a-zA-Z][-_.:a-zA-Z0-9]*\s*').match
DEFAULT_OUTPUT_ENCODING = "utf-8"
def _match_css_class(str):
"""Build a RE to match the given CSS class."""
return re.compile(r"(^|.*\s)%s($|\s)" % str)
# First, the classes that represent markup elements.
class PageElement(object):
"""Contains the navigational information for some part of the page
(either a tag or a piece of text)"""
def _invert(h):
"Cheap function to invert a hash."
i = {}
for k,v in h.items():
i[v] = k
return i
XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'",
"quot" : '"',
"amp" : "&",
"lt" : "<",
"gt" : ">" }
XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS)
def setup(self, parent=None, previous=None):
"""Sets up the initial relations between this element and
other elements."""
self.parent = parent
self.previous = previous
self.next = None
self.previousSibling = None
self.nextSibling = None
if self.parent and self.parent.contents:
self.previousSibling = self.parent.contents[-1]
self.previousSibling.nextSibling = self
def replaceWith(self, replaceWith):
oldParent = self.parent
myIndex = self.parent.index(self)
if hasattr(replaceWith, "parent")\
and replaceWith.parent is self.parent:
# We're replacing this element with one of its siblings.
index = replaceWith.parent.index(replaceWith)
if index and index < myIndex:
# Furthermore, it comes before this element. That
# means that when we extract it, the index of this
# element will change.
myIndex = myIndex - 1
self.extract()
oldParent.insert(myIndex, replaceWith)
def replaceWithChildren(self):
myParent = self.parent
myIndex = self.parent.index(self)
self.extract()
reversedChildren = list(self.contents)
reversedChildren.reverse()
for child in reversedChildren:
myParent.insert(myIndex, child)
def extract(self):
"""Destructively rips this element out of the tree."""
if self.parent:
try:
del self.parent.contents[self.parent.index(self)]
except ValueError:
pass
#Find the two elements that would be next to each other if
#this element (and any children) hadn't been parsed. Connect
#the two.
lastChild = self._lastRecursiveChild()
nextElement = lastChild.next
if self.previous:
self.previous.next = nextElement
if nextElement:
nextElement.previous = self.previous
self.previous = None
lastChild.next = None
self.parent = None
if self.previousSibling:
self.previousSibling.nextSibling = self.nextSibling
if self.nextSibling:
self.nextSibling.previousSibling = self.previousSibling
self.previousSibling = self.nextSibling = None
return self
def _lastRecursiveChild(self):
"Finds the last element beneath this object to be parsed."
lastChild = self
while hasattr(lastChild, 'contents') and lastChild.contents:
lastChild = lastChild.contents[-1]
return lastChild
def insert(self, position, newChild):
if isinstance(newChild, basestring) \
and not isinstance(newChild, NavigableString):
newChild = NavigableString(newChild)
position = min(position, len(self.contents))
if hasattr(newChild, 'parent') and newChild.parent is not None:
# We're 'inserting' an element that's already one
# of this object's children.
if newChild.parent is self:
index = self.index(newChild)
if index > position:
# Furthermore we're moving it further down the
# list of this object's children. That means that
# when we extract this element, our target index
# will jump down one.
position = position - 1
newChild.extract()
newChild.parent = self
previousChild = None
if position == 0:
newChild.previousSibling = None
newChild.previous = self
else:
previousChild = self.contents[position-1]
newChild.previousSibling = previousChild
newChild.previousSibling.nextSibling = newChild
newChild.previous = previousChild._lastRecursiveChild()
if newChild.previous:
newChild.previous.next = newChild
newChildsLastElement = newChild._lastRecursiveChild()
if position >= len(self.contents):
newChild.nextSibling = None
parent = self
parentsNextSibling = None
while not parentsNextSibling:
parentsNextSibling = parent.nextSibling
parent = parent.parent
if not parent: # This is the last element in the document.
break
if parentsNextSibling:
newChildsLastElement.next = parentsNextSibling
else:
newChildsLastElement.next = None
else:
nextChild = self.contents[position]
newChild.nextSibling = nextChild
if newChild.nextSibling:
newChild.nextSibling.previousSibling = newChild
newChildsLastElement.next = nextChild
if newChildsLastElement.next:
newChildsLastElement.next.previous = newChildsLastElement
self.contents.insert(position, newChild)
def append(self, tag):
"""Appends the given tag to the contents of this tag."""
self.insert(len(self.contents), tag)
def findNext(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears after this Tag in the document."""
return self._findOne(self.findAllNext, name, attrs, text, **kwargs)
def findAllNext(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
after this Tag in the document."""
return self._findAll(name, attrs, text, limit, self.nextGenerator,
**kwargs)
def findNextSibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears after this Tag in the document."""
return self._findOne(self.findNextSiblings, name, attrs, text,
**kwargs)
def findNextSiblings(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear after this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.nextSiblingGenerator, **kwargs)
fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x
def findPrevious(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears before this Tag in the document."""
return self._findOne(self.findAllPrevious, name, attrs, text, **kwargs)
def findAllPrevious(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
before this Tag in the document."""
return self._findAll(name, attrs, text, limit, self.previousGenerator,
**kwargs)
fetchPrevious = findAllPrevious # Compatibility with pre-3.x
def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears before this Tag in the document."""
return self._findOne(self.findPreviousSiblings, name, attrs, text,
**kwargs)
def findPreviousSiblings(self, name=None, attrs={}, text=None,
limit=None, **kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear before this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.previousSiblingGenerator, **kwargs)
fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x
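    # Illustrative sketch of the directional searches above (assumes a
    # parsed "soup" with two sibling <p> tags, id="a" then id="b"):
    #
    #   first = soup.find('p', id='a')
    #   first.findNextSibling('p')   # -> the <p id="b"> tag
    #   first.findNext(text=True)    # -> the next text node in the document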
def findParent(self, name=None, attrs={}, **kwargs):
"""Returns the closest parent of this Tag that matches the given
criteria."""
# NOTE: We can't use _findOne because findParents takes a different
# set of arguments.
r = None
l = self.findParents(name, attrs, 1)
if l:
r = l[0]
return r
def findParents(self, name=None, attrs={}, limit=None, **kwargs):
"""Returns the parents of this Tag that match the given
criteria."""
return self._findAll(name, attrs, None, limit, self.parentGenerator,
**kwargs)
fetchParents = findParents # Compatibility with pre-3.x
#These methods do the real heavy lifting.
def _findOne(self, method, name, attrs, text, **kwargs):
r = None
l = method(name, attrs, text, 1, **kwargs)
if l:
r = l[0]
return r
def _findAll(self, name, attrs, text, limit, generator, **kwargs):
"Iterates over a generator looking for things that match."
if isinstance(name, SoupStrainer):
strainer = name
# (Possibly) special case some findAll*(...) searches
elif text is None and not limit and not attrs and not kwargs:
# findAll*(True)
if name is True:
return [element for element in generator()
if isinstance(element, Tag)]
# findAll*('tag-name')
elif isinstance(name, basestring):
return [element for element in generator()
if isinstance(element, Tag) and
element.name == name]
else:
strainer = SoupStrainer(name, attrs, text, **kwargs)
# Build a SoupStrainer
else:
strainer = SoupStrainer(name, attrs, text, **kwargs)
results = ResultSet(strainer)
g = generator()
while True:
try:
i = g.next()
except StopIteration:
break
if i:
found = strainer.search(i)
if found:
results.append(found)
if limit and len(results) >= limit:
break
return results
#These Generators can be used to navigate starting from both
#NavigableStrings and Tags.
def nextGenerator(self):
i = self
while i is not None:
i = i.next
yield i
def nextSiblingGenerator(self):
i = self
while i is not None:
i = i.nextSibling
yield i
def previousGenerator(self):
i = self
while i is not None:
i = i.previous
yield i
def previousSiblingGenerator(self):
i = self
while i is not None:
i = i.previousSibling
yield i
def parentGenerator(self):
i = self
while i is not None:
i = i.parent
yield i
# Utility methods
def substituteEncoding(self, str, encoding=None):
encoding = encoding or "utf-8"
return str.replace("%SOUP-ENCODING%", encoding)
def toEncoding(self, s, encoding=None):
"""Encodes an object to a string in some encoding, or to Unicode.
."""
if isinstance(s, unicode):
if encoding:
s = s.encode(encoding)
elif isinstance(s, str):
if encoding:
s = s.encode(encoding)
else:
s = unicode(s)
else:
if encoding:
s = self.toEncoding(str(s), encoding)
else:
s = unicode(s)
return s
BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
+ "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
+ ")")
def _sub_entity(self, x):
"""Used with a regular expression to substitute the
appropriate XML entity for an XML special character."""
return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";"
class NavigableString(unicode, PageElement):
def __new__(cls, value):
"""Create a new NavigableString.
When unpickling a NavigableString, this method is called with
the string in DEFAULT_OUTPUT_ENCODING. That encoding needs to be
passed in to the superclass's __new__ or the superclass won't know
how to handle non-ASCII characters.
"""
if isinstance(value, unicode):
return unicode.__new__(cls, value)
return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING)
def __getnewargs__(self):
return (NavigableString.__str__(self),)
def __getattr__(self, attr):
"""text.string gives you text. This is for backwards
compatibility for Navigable*String, but for CData* it lets you
get the string without the CData wrapper."""
if attr == 'string':
return self
else:
raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__.__name__, attr)
def __unicode__(self):
return str(self).decode(DEFAULT_OUTPUT_ENCODING)
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
# Substitute outgoing XML entities.
data = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, self)
if encoding:
return data.encode(encoding)
else:
return data
class CData(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
return "<![CDATA[%s]]>" % NavigableString.__str__(self, encoding)
class ProcessingInstruction(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
output = self
if "%SOUP-ENCODING%" in output:
output = self.substituteEncoding(output, encoding)
return "<?%s?>" % self.toEncoding(output, encoding)
class Comment(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
return "<!--%s-->" % NavigableString.__str__(self, encoding)
class Declaration(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
return "<!%s>" % NavigableString.__str__(self, encoding)
class Tag(PageElement):
"""Represents a found HTML tag with its attributes and contents."""
def _convertEntities(self, match):
"""Used in a call to re.sub to replace HTML, XML, and numeric
entities with the appropriate Unicode characters. If HTML
entities are being converted, any unrecognized entities are
escaped."""
x = match.group(1)
if self.convertHTMLEntities and x in name2codepoint:
return unichr(name2codepoint[x])
elif x in self.XML_ENTITIES_TO_SPECIAL_CHARS:
if self.convertXMLEntities:
return self.XML_ENTITIES_TO_SPECIAL_CHARS[x]
else:
return u'&%s;' % x
elif len(x) > 0 and x[0] == '#':
# Handle numeric entities
if len(x) > 1 and x[1] == 'x':
return unichr(int(x[2:], 16))
else:
return unichr(int(x[1:]))
elif self.escapeUnrecognizedEntities:
return u'&%s;' % x
else:
return u'&%s;' % x
def __init__(self, parser, name, attrs=None, parent=None,
previous=None):
"Basic constructor."
# We don't actually store the parser object: that lets extracted
# chunks be garbage-collected
self.parserClass = parser.__class__
self.isSelfClosing = parser.isSelfClosingTag(name)
self.name = name
if attrs is None:
attrs = []
elif isinstance(attrs, dict):
attrs = attrs.items()
self.attrs = attrs
self.contents = []
self.setup(parent, previous)
self.hidden = False
self.containsSubstitutions = False
self.convertHTMLEntities = parser.convertHTMLEntities
self.convertXMLEntities = parser.convertXMLEntities
self.escapeUnrecognizedEntities = parser.escapeUnrecognizedEntities
# Convert any HTML, XML, or numeric entities in the attribute values.
convert = lambda(k, val): (k,
re.sub("&(#\d+|#x[0-9a-fA-F]+|\w+);",
self._convertEntities,
val))
self.attrs = map(convert, self.attrs)
def getString(self):
if (len(self.contents) == 1
and isinstance(self.contents[0], NavigableString)):
return self.contents[0]
def setString(self, string):
"""Replace the contents of the tag with a string"""
self.clear()
self.append(string)
string = property(getString, setString)
def getText(self, separator=u""):
if not len(self.contents):
return u""
stopNode = self._lastRecursiveChild().next
strings = []
current = self.contents[0]
while current is not stopNode:
if isinstance(current, NavigableString):
strings.append(current.strip())
current = current.next
return separator.join(strings)
text = property(getText)
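    # Illustrative example of the text extraction above:
    #
    #   BeautifulSoup('<p>one <b>two</b></p>').p.text          # -> u'onetwo'
    #   BeautifulSoup('<p>one <b>two</b></p>').p.getText(u' ') # -> u'one two'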
def get(self, key, default=None):
"""Returns the value of the 'key' attribute for the tag, or
the value given for 'default' if it doesn't have that
attribute."""
return self._getAttrMap().get(key, default)
def clear(self):
"""Extract all children."""
for child in self.contents[:]:
child.extract()
def index(self, element):
for i, child in enumerate(self.contents):
if child is element:
return i
raise ValueError("Tag.index: element not in tag")
def has_key(self, key):
return self._getAttrMap().has_key(key)
def __getitem__(self, key):
"""tag[key] returns the value of the 'key' attribute for the tag,
and throws an exception if it's not there."""
return self._getAttrMap()[key]
def __iter__(self):
"Iterating over a tag iterates over its contents."
return iter(self.contents)
def __len__(self):
"The length of a tag is the length of its list of contents."
return len(self.contents)
def __contains__(self, x):
return x in self.contents
def __nonzero__(self):
"A tag is non-None even if it has no contents."
return True
def __setitem__(self, key, value):
"""Setting tag[key] sets the value of the 'key' attribute for the
tag."""
self._getAttrMap()
self.attrMap[key] = value
found = False
for i in range(0, len(self.attrs)):
if self.attrs[i][0] == key:
self.attrs[i] = (key, value)
found = True
if not found:
self.attrs.append((key, value))
def __delitem__(self, key):
"Deleting tag[key] deletes all 'key' attributes for the tag."
for item in self.attrs:
if item[0] == key:
self.attrs.remove(item)
#We don't break because bad HTML can define the same
#attribute multiple times.
self._getAttrMap()
if self.attrMap.has_key(key):
del self.attrMap[key]
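    # The mapping protocol above lets a Tag be treated like a dict of its
    # attributes (illustrative):
    #
    #   tag = BeautifulSoup('<a href="/x">y</a>').a
    #   tag['href']              # -> u'/x'
    #   tag['rel'] = 'nofollow'  # add or change an attribute
    #   del tag['href']          # remove every 'href' attribute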
def __call__(self, *args, **kwargs):
"""Calling a tag like a function is the same as calling its
findAll() method. Eg. tag('a') returns a list of all the A tags
found within this tag."""
        return self.findAll(*args, **kwargs)
def __getattr__(self, tag):
#print "Getattr %s.%s" % (self.__class__, tag)
if len(tag) > 3 and tag.rfind('Tag') == len(tag)-3:
return self.find(tag[:-3])
elif tag.find('__') != 0:
return self.find(tag)
raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__, tag)
def __eq__(self, other):
"""Returns true iff this tag has the same name, the same attributes,
and the same contents (recursively) as the given tag.
NOTE: right now this will return false if two tags have the
same attributes in a different order. Should this be fixed?"""
if other is self:
return True
if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other):
return False
for i in range(0, len(self.contents)):
if self.contents[i] != other.contents[i]:
return False
return True
def __ne__(self, other):
"""Returns true iff this tag is not identical to the other tag,
as defined in __eq__."""
return not self == other
def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING):
"""Renders this tag as a string."""
return self.__str__(encoding)
def __unicode__(self):
return self.__str__(None)
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING,
prettyPrint=False, indentLevel=0):
"""Returns a string or Unicode representation of this tag and
its contents. To get Unicode, pass None for encoding.
NOTE: since Python's HTML parser consumes whitespace, this
method is not certain to reproduce the whitespace present in
the original string."""
encodedName = self.toEncoding(self.name, encoding)
attrs = []
if self.attrs:
for key, val in self.attrs:
fmt = '%s="%s"'
if isinstance(val, basestring):
if self.containsSubstitutions and '%SOUP-ENCODING%' in val:
val = self.substituteEncoding(val, encoding)
# The attribute value either:
#
# * Contains no embedded double quotes or single quotes.
# No problem: we enclose it in double quotes.
# * Contains embedded single quotes. No problem:
# double quotes work here too.
# * Contains embedded double quotes. No problem:
# we enclose it in single quotes.
# * Embeds both single _and_ double quotes. This
# can't happen naturally, but it can happen if
# you modify an attribute value after parsing
# the document. Now we have a bit of a
# problem. We solve it by enclosing the
# attribute in single quotes, and escaping any
# embedded single quotes to XML entities.
if '"' in val:
fmt = "%s='%s'"
if "'" in val:
                            # Escape embedded single quotes with the
                            # numeric reference &#39;, which is valid in
                            # both HTML and XML (&apos; is not defined
                            # in HTML 4).
                            val = val.replace("'", "&#39;")
# Now we're okay w/r/t quotes. But the attribute
# value might also contain angle brackets, or
# ampersands that aren't part of entities. We need
# to escape those to XML entities too.
val = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, val)
attrs.append(fmt % (self.toEncoding(key, encoding),
self.toEncoding(val, encoding)))
close = ''
closeTag = ''
if self.isSelfClosing:
close = ' /'
else:
closeTag = '</%s>' % encodedName
indentTag, indentContents = 0, 0
if prettyPrint:
indentTag = indentLevel
space = (' ' * (indentTag-1))
indentContents = indentTag + 1
contents = self.renderContents(encoding, prettyPrint, indentContents)
if self.hidden:
s = contents
else:
s = []
attributeString = ''
if attrs:
attributeString = ' ' + ' '.join(attrs)
if prettyPrint:
s.append(space)
s.append('<%s%s%s>' % (encodedName, attributeString, close))
if prettyPrint:
s.append("\n")
s.append(contents)
if prettyPrint and contents and contents[-1] != "\n":
s.append("\n")
if prettyPrint and closeTag:
s.append(space)
s.append(closeTag)
if prettyPrint and closeTag and self.nextSibling:
s.append("\n")
s = ''.join(s)
return s
def decompose(self):
"""Recursively destroys the contents of this tree."""
self.extract()
if len(self.contents) == 0:
return
current = self.contents[0]
while current is not None:
next = current.next
if isinstance(current, Tag):
del current.contents[:]
current.parent = None
current.previous = None
current.previousSibling = None
current.next = None
current.nextSibling = None
current = next
def prettify(self, encoding=DEFAULT_OUTPUT_ENCODING):
return self.__str__(encoding, True)
def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
prettyPrint=False, indentLevel=0):
"""Renders the contents of this tag as a string in the given
        encoding. If encoding is None, returns a Unicode string."""
s=[]
for c in self:
text = None
if isinstance(c, NavigableString):
text = c.__str__(encoding)
elif isinstance(c, Tag):
s.append(c.__str__(encoding, prettyPrint, indentLevel))
if text and prettyPrint:
text = text.strip()
if text:
if prettyPrint:
s.append(" " * (indentLevel-1))
s.append(text)
if prettyPrint:
s.append("\n")
return ''.join(s)
#Soup methods
def find(self, name=None, attrs={}, recursive=True, text=None,
**kwargs):
"""Return only the first child of this Tag matching the given
criteria."""
r = None
l = self.findAll(name, attrs, recursive, text, 1, **kwargs)
if l:
r = l[0]
return r
findChild = find
def findAll(self, name=None, attrs={}, recursive=True, text=None,
limit=None, **kwargs):
"""Extracts a list of Tag objects that match the given
criteria. You can specify the name of the Tag and any
attributes you want the Tag to have.
The value of a key-value pair in the 'attrs' map can be a
string, a list of strings, a regular expression object, or a
callable that takes a string and returns whether or not the
string matches for some custom definition of 'matches'. The
same is true of the tag name."""
generator = self.recursiveChildGenerator
if not recursive:
generator = self.childGenerator
return self._findAll(name, attrs, text, limit, generator, **kwargs)
findChildren = findAll
# Pre-3.x compatibility methods
first = find
fetch = findAll
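    # Illustrative matching sketch for find()/findAll(): the tag name and
    # each attribute value may be a string, a list, a compiled regular
    # expression, or a callable.
    #
    #   soup.findAll('a', href=re.compile('^http:'))   # regexp attr match
    #   soup.findAll(['b', 'i'])                       # list of tag names
    #   soup.findAll(lambda tag: not tag.attrs)        # callable gets the Tag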
def fetchText(self, text=None, recursive=True, limit=None):
return self.findAll(text=text, recursive=recursive, limit=limit)
def firstText(self, text=None, recursive=True):
return self.find(text=text, recursive=recursive)
#Private methods
def _getAttrMap(self):
"""Initializes a map representation of this tag's attributes,
if not already initialized."""
        # A plain getattr() here would fall through to Tag.__getattr__
        # and trigger a tag search, so consult the instance dict directly.
        if 'attrMap' not in self.__dict__:
self.attrMap = {}
for (key, value) in self.attrs:
self.attrMap[key] = value
return self.attrMap
#Generator methods
def childGenerator(self):
# Just use the iterator from the contents
return iter(self.contents)
def recursiveChildGenerator(self):
if not len(self.contents):
raise StopIteration
stopNode = self._lastRecursiveChild().next
current = self.contents[0]
while current is not stopNode:
yield current
current = current.next
# Next, a couple classes to represent queries and their results.
class SoupStrainer:
"""Encapsulates a number of ways of matching a markup element (tag or
text)."""
def __init__(self, name=None, attrs={}, text=None, **kwargs):
self.name = name
if isinstance(attrs, basestring):
kwargs['class'] = _match_css_class(attrs)
attrs = None
if kwargs:
if attrs:
attrs = attrs.copy()
attrs.update(kwargs)
else:
attrs = kwargs
self.attrs = attrs
self.text = text
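    # Illustrative use: a strainer can be handed to findAll(), or passed
    # as parseOnlyThese= to the soup constructor so that only matching
    # parts of the document are parsed at all:
    #
    #   links = SoupStrainer('a', href=re.compile('^http'))
    #   BeautifulSoup(markup, parseOnlyThese=links)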
def __str__(self):
if self.text:
return self.text
else:
return "%s|%s" % (self.name, self.attrs)
def searchTag(self, markupName=None, markupAttrs={}):
found = None
markup = None
if isinstance(markupName, Tag):
markup = markupName
markupAttrs = markup
callFunctionWithTagData = callable(self.name) \
and not isinstance(markupName, Tag)
if (not self.name) \
or callFunctionWithTagData \
or (markup and self._matches(markup, self.name)) \
or (not markup and self._matches(markupName, self.name)):
if callFunctionWithTagData:
match = self.name(markupName, markupAttrs)
else:
match = True
markupAttrMap = None
for attr, matchAgainst in self.attrs.items():
if not markupAttrMap:
if hasattr(markupAttrs, 'get'):
markupAttrMap = markupAttrs
else:
markupAttrMap = {}
for k,v in markupAttrs:
markupAttrMap[k] = v
attrValue = markupAttrMap.get(attr)
if not self._matches(attrValue, matchAgainst):
match = False
break
if match:
if markup:
found = markup
else:
found = markupName
return found
def search(self, markup):
#print 'looking for %s in %s' % (self, markup)
found = None
# If given a list of items, scan it for a text element that
# matches.
if hasattr(markup, "__iter__") \
and not isinstance(markup, Tag):
for element in markup:
if isinstance(element, NavigableString) \
and self.search(element):
found = element
break
# If it's a Tag, make sure its name or attributes match.
# Don't bother with Tags if we're searching for text.
elif isinstance(markup, Tag):
if not self.text:
found = self.searchTag(markup)
# If it's text, make sure the text matches.
elif isinstance(markup, NavigableString) or \
isinstance(markup, basestring):
if self._matches(markup, self.text):
found = markup
else:
raise Exception, "I don't know how to match against a %s" \
% markup.__class__
return found
def _matches(self, markup, matchAgainst):
#print "Matching %s against %s" % (markup, matchAgainst)
result = False
if matchAgainst is True:
result = markup is not None
elif callable(matchAgainst):
result = matchAgainst(markup)
else:
#Custom match methods take the tag as an argument, but all
#other ways of matching match the tag name as a string.
if isinstance(markup, Tag):
markup = markup.name
if markup and not isinstance(markup, basestring):
markup = unicode(markup)
#Now we know that chunk is either a string, or None.
if hasattr(matchAgainst, 'match'):
# It's a regexp object.
result = markup and matchAgainst.search(markup)
elif hasattr(matchAgainst, '__iter__'): # list-like
result = markup in matchAgainst
            elif hasattr(matchAgainst, 'items'): # dict-like
                result = matchAgainst.has_key(markup)
elif matchAgainst and isinstance(markup, basestring):
if isinstance(markup, unicode):
matchAgainst = unicode(matchAgainst)
else:
matchAgainst = str(matchAgainst)
if not result:
result = matchAgainst == markup
return result
class ResultSet(list):
"""A ResultSet is just a list that keeps track of the SoupStrainer
that created it."""
def __init__(self, source):
        list.__init__(self)
self.source = source
# Now, some helper functions.
def buildTagMap(default, *args):
"""Turns a list of maps, lists, or scalars into a single map.
Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and
NESTING_RESET_TAGS maps out of lists and partial maps."""
built = {}
for portion in args:
if hasattr(portion, 'items'):
#It's a map. Merge it.
for k,v in portion.items():
built[k] = v
elif hasattr(portion, '__iter__'): # is a list
#It's a list. Map each item to the default.
for k in portion:
built[k] = default
else:
#It's a scalar. Map it to the default.
built[portion] = default
return built
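# For example (illustrative), buildTagMap(None, ['br', 'hr'], {'p': ['p']})
# returns {'br': None, 'hr': None, 'p': ['p']}: list items map to the
# default, and maps are merged as-is.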
# Now, the parser classes.
class BeautifulStoneSoup(Tag, SGMLParser):
"""This class contains the basic parser and search code. It defines
a parser that knows nothing about tag behavior except for the
following:
You can't close a tag without closing all the tags it encloses.
That is, "<foo><bar></foo>" actually means
"<foo><bar></bar></foo>".
[Another possible explanation is "<foo><bar /></foo>", but since
this class defines no SELF_CLOSING_TAGS, it will never use that
explanation.]
This class is useful for parsing XML or made-up markup languages,
or when BeautifulSoup makes an assumption counter to what you were
expecting."""
SELF_CLOSING_TAGS = {}
NESTABLE_TAGS = {}
RESET_NESTING_TAGS = {}
QUOTE_TAGS = {}
PRESERVE_WHITESPACE_TAGS = []
MARKUP_MASSAGE = [(re.compile('(<[^<>]*)/>'),
lambda x: x.group(1) + ' />'),
(re.compile('<!\s+([^<>]*)>'),
lambda x: '<!' + x.group(1) + '>')
]
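    # A list like this can also be passed as markupMassage= to the
    # constructor to replace the defaults; for instance (illustrative),
    # to repair comments that open with "<!-" instead of "<!--":
    #
    #   myMassage = [(re.compile('<!-([^-])'), lambda m: '<!--' + m.group(1))]
    #   BeautifulStoneSoup(badMarkup, markupMassage=myMassage)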
ROOT_TAG_NAME = u'[document]'
HTML_ENTITIES = "html"
XML_ENTITIES = "xml"
XHTML_ENTITIES = "xhtml"
# TODO: This only exists for backwards-compatibility
ALL_ENTITIES = XHTML_ENTITIES
# Used when determining whether a text node is all whitespace and
# can be replaced with a single space. A text node that contains
# fancy Unicode spaces (usually non-breaking) should be left
# alone.
STRIP_ASCII_SPACES = { 9: None, 10: None, 12: None, 13: None, 32: None, }
def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None,
markupMassage=True, smartQuotesTo=XML_ENTITIES,
convertEntities=None, selfClosingTags=None, isHTML=False):
"""The Soup object is initialized as the 'root tag', and the
provided markup (which can be a string or a file-like object)
is fed into the underlying parser.
sgmllib will process most bad HTML, and the BeautifulSoup
class has some tricks for dealing with some HTML that kills
sgmllib, but Beautiful Soup can nonetheless choke or lose data
if your data uses self-closing tags or declarations
incorrectly.
By default, Beautiful Soup uses regexes to sanitize input,
avoiding the vast majority of these problems. If the problems
don't apply to you, pass in False for markupMassage, and
you'll get better performance.
The default parser massage techniques fix the two most common
instances of invalid HTML that choke sgmllib:
<br/> (No space between name of closing tag and tag close)
<! --Comment--> (Extraneous whitespace in declaration)
You can pass in a custom list of (RE object, replace method)
tuples to get Beautiful Soup to scrub your input the way you
want."""
self.parseOnlyThese = parseOnlyThese
self.fromEncoding = fromEncoding
self.smartQuotesTo = smartQuotesTo
self.convertEntities = convertEntities
# Set the rules for how we'll deal with the entities we
# encounter
if self.convertEntities:
# It doesn't make sense to convert encoded characters to
# entities even while you're converting entities to Unicode.
# Just convert it all to Unicode.
self.smartQuotesTo = None
if convertEntities == self.HTML_ENTITIES:
self.convertXMLEntities = False
self.convertHTMLEntities = True
self.escapeUnrecognizedEntities = True
elif convertEntities == self.XHTML_ENTITIES:
self.convertXMLEntities = True
self.convertHTMLEntities = True
self.escapeUnrecognizedEntities = False
elif convertEntities == self.XML_ENTITIES:
self.convertXMLEntities = True
self.convertHTMLEntities = False
self.escapeUnrecognizedEntities = False
else:
self.convertXMLEntities = False
self.convertHTMLEntities = False
self.escapeUnrecognizedEntities = False
self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags)
SGMLParser.__init__(self)
if hasattr(markup, 'read'): # It's a file-type object.
markup = markup.read()
self.markup = markup
self.markupMassage = markupMassage
try:
self._feed(isHTML=isHTML)
except StopParsing:
pass
self.markup = None # The markup can now be GCed
def convert_charref(self, name):
"""This method fixes a bug in Python's SGMLParser."""
try:
n = int(name)
except ValueError:
return
if not 0 <= n <= 127:  # ASCII ends at 127, not 255
return
return self.convert_codepoint(n)
def _feed(self, inDocumentEncoding=None, isHTML=False):
# Convert the document to Unicode.
markup = self.markup
if isinstance(markup, unicode):
if not hasattr(self, 'originalEncoding'):
self.originalEncoding = None
else:
dammit = UnicodeDammit\
(markup, [self.fromEncoding, inDocumentEncoding],
smartQuotesTo=self.smartQuotesTo, isHTML=isHTML)
markup = dammit.unicode
self.originalEncoding = dammit.originalEncoding
self.declaredHTMLEncoding = dammit.declaredHTMLEncoding
if markup:
if self.markupMassage:
if not hasattr(self.markupMassage, "__iter__"):
self.markupMassage = self.MARKUP_MASSAGE
for fix, m in self.markupMassage:
markup = fix.sub(m, markup)
# TODO: We get rid of markupMassage so that the
# soup object can be deepcopied later on. Some
# Python installations can't copy regexes. If anyone
# was relying on the existence of markupMassage, this
# might cause problems.
del(self.markupMassage)
self.reset()
SGMLParser.feed(self, markup)
# Close out any unfinished strings and close all the open tags.
self.endData()
while self.currentTag.name != self.ROOT_TAG_NAME:
self.popTag()
def __getattr__(self, methodName):
"""This method routes method call requests to either the SGMLParser
superclass or the Tag superclass, depending on the method name."""
#print "__getattr__ called on %s.%s" % (self.__class__, methodName)
if methodName.startswith('start_') or methodName.startswith('end_') \
or methodName.startswith('do_'):
return SGMLParser.__getattr__(self, methodName)
elif not methodName.startswith('__'):
return Tag.__getattr__(self, methodName)
else:
raise AttributeError
def isSelfClosingTag(self, name):
"""Returns true iff the given string is the name of a
self-closing tag according to this parser."""
return self.SELF_CLOSING_TAGS.has_key(name) \
or self.instanceSelfClosingTags.has_key(name)
def reset(self):
Tag.__init__(self, self, self.ROOT_TAG_NAME)
self.hidden = 1
SGMLParser.reset(self)
self.currentData = []
self.currentTag = None
self.tagStack = []
self.quoteStack = []
self.pushTag(self)
def popTag(self):
tag = self.tagStack.pop()
#print "Pop", tag.name
if self.tagStack:
self.currentTag = self.tagStack[-1]
return self.currentTag
def pushTag(self, tag):
#print "Push", tag.name
if self.currentTag:
self.currentTag.contents.append(tag)
self.tagStack.append(tag)
self.currentTag = self.tagStack[-1]
def endData(self, containerClass=NavigableString):
if self.currentData:
currentData = u''.join(self.currentData)
if (currentData.translate(self.STRIP_ASCII_SPACES) == '' and
not set([tag.name for tag in self.tagStack]).intersection(
self.PRESERVE_WHITESPACE_TAGS)):
if '\n' in currentData:
currentData = '\n'
else:
currentData = ' '
self.currentData = []
if self.parseOnlyThese and len(self.tagStack) <= 1 and \
(not self.parseOnlyThese.text or \
not self.parseOnlyThese.search(currentData)):
return
o = containerClass(currentData)
o.setup(self.currentTag, self.previous)
if self.previous:
self.previous.next = o
self.previous = o
self.currentTag.contents.append(o)
def _popToTag(self, name, inclusivePop=True):
"""Pops the tag stack up to and including the most recent
instance of the given tag. If inclusivePop is false, pops the tag
stack up to but *not* including the most recent instance of
the given tag."""
#print "Popping to %s" % name
if name == self.ROOT_TAG_NAME:
return
numPops = 0
mostRecentTag = None
for i in range(len(self.tagStack)-1, 0, -1):
if name == self.tagStack[i].name:
numPops = len(self.tagStack)-i
break
if not inclusivePop:
numPops = numPops - 1
for i in range(0, numPops):
mostRecentTag = self.popTag()
return mostRecentTag
def _smartPop(self, name):
"""We need to pop up to the previous tag of this type, unless
one of this tag's nesting reset triggers comes between this
tag and the previous tag of this type, OR unless this tag is a
generic nesting trigger and another generic nesting trigger
comes between this tag and the previous tag of this type.
Examples:
<p>Foo<b>Bar *<p>* should pop to 'p', not 'b'.
<p>Foo<table>Bar *<p>* should pop to 'table', not 'p'.
<p>Foo<table><tr>Bar *<p>* should pop to 'tr', not 'p'.
<li><ul><li> *<li>* should pop to 'ul', not the first 'li'.
<tr><table><tr> *<tr>* should pop to 'table', not the first 'tr'
<td><tr><td> *<td>* should pop to 'tr', not the first 'td'
"""
nestingResetTriggers = self.NESTABLE_TAGS.get(name)
isNestable = nestingResetTriggers != None
isResetNesting = self.RESET_NESTING_TAGS.has_key(name)
popTo = None
inclusive = True
for i in range(len(self.tagStack)-1, 0, -1):
p = self.tagStack[i]
if (not p or p.name == name) and not isNestable:
#Non-nestable tags get popped to the top or to their
#last occurrence.
popTo = name
break
if (nestingResetTriggers is not None
and p.name in nestingResetTriggers) \
or (nestingResetTriggers is None and isResetNesting
and self.RESET_NESTING_TAGS.has_key(p.name)):
#If we encounter one of the nesting reset triggers
#peculiar to this tag, or we encounter another tag
#that causes nesting to reset, pop up to but not
#including that tag.
popTo = p.name
inclusive = False
break
p = p.parent
if popTo:
self._popToTag(popTo, inclusive)
def unknown_starttag(self, name, attrs, selfClosing=0):
#print "Start tag %s: %s" % (name, attrs)
if self.quoteStack:
#This is not a real tag.
#print "<%s> is not real!" % name
attrs = ''.join([' %s="%s"' % (x, y) for x, y in attrs])
self.handle_data('<%s%s>' % (name, attrs))
return
self.endData()
if not self.isSelfClosingTag(name) and not selfClosing:
self._smartPop(name)
if self.parseOnlyThese and len(self.tagStack) <= 1 \
and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)):
return
tag = Tag(self, name, attrs, self.currentTag, self.previous)
if self.previous:
self.previous.next = tag
self.previous = tag
self.pushTag(tag)
if selfClosing or self.isSelfClosingTag(name):
self.popTag()
if name in self.QUOTE_TAGS:
#print "Beginning quote (%s)" % name
self.quoteStack.append(name)
self.literal = 1
return tag
def unknown_endtag(self, name):
#print "End tag %s" % name
if self.quoteStack and self.quoteStack[-1] != name:
#This is not a real end tag.
#print "</%s> is not real!" % name
self.handle_data('</%s>' % name)
return
self.endData()
self._popToTag(name)
if self.quoteStack and self.quoteStack[-1] == name:
self.quoteStack.pop()
self.literal = (len(self.quoteStack) > 0)
def handle_data(self, data):
self.currentData.append(data)
def _toStringSubclass(self, text, subclass):
"""Adds a certain piece of text to the tree as a NavigableString
subclass."""
self.endData()
self.handle_data(text)
self.endData(subclass)
def handle_pi(self, text):
"""Handle a processing instruction as a ProcessingInstruction
object, possibly one with a %SOUP-ENCODING% slot into which an
encoding will be plugged later."""
if text[:3] == "xml":
text = u"xml version='1.0' encoding='%SOUP-ENCODING%'"
self._toStringSubclass(text, ProcessingInstruction)
def handle_comment(self, text):
"Handle comments as Comment objects."
self._toStringSubclass(text, Comment)
def handle_charref(self, ref):
"Handle character references as data."
if self.convertEntities:
data = unichr(int(ref))
else:
data = '&#%s;' % ref
self.handle_data(data)
def handle_entityref(self, ref):
"""Handle entity references as data, possibly converting known
HTML and/or XML entity references to the corresponding Unicode
characters."""
data = None
if self.convertHTMLEntities:
try:
data = unichr(name2codepoint[ref])
except KeyError:
pass
if not data and self.convertXMLEntities:
data = self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref)
if not data and self.convertHTMLEntities and \
not self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref):
# TODO: We've got a problem here. We're told this is
# an entity reference, but it's not an XML entity
# reference or an HTML entity reference. Nonetheless,
# the logical thing to do is to pass it through as an
# unrecognized entity reference.
#
# Except: when the input is "&carol;" this function
# will be called with input "carol". When the input is
# "AT&T", this function will be called with input
# "T". We have no way of knowing whether a semicolon
# was present originally, so we don't know whether
# this is an unknown entity or just a misplaced
# ampersand.
#
# The more common case is a misplaced ampersand, so I
# escape the ampersand and omit the trailing semicolon.
data = "&%s" % ref
if not data:
# This case is different from the one above, because we
# haven't already gone through a supposedly comprehensive
# mapping of entities to Unicode characters. We might not
# have gone through any mapping at all. So the chances are
# very high that this is a real entity, and not a
# misplaced ampersand.
data = "&%s;" % ref
self.handle_data(data)
def handle_decl(self, data):
"Handle DOCTYPEs and the like as Declaration objects."
self._toStringSubclass(data, Declaration)
def parse_declaration(self, i):
"""Treat a bogus SGML declaration as raw data. Treat a CDATA
declaration as a CData object."""
j = None
if self.rawdata[i:i+9] == '<![CDATA[':
k = self.rawdata.find(']]>', i)
if k == -1:
k = len(self.rawdata)
data = self.rawdata[i+9:k]
j = k+3
self._toStringSubclass(data, CData)
else:
try:
j = SGMLParser.parse_declaration(self, i)
except SGMLParseError:
toHandle = self.rawdata[i:]
self.handle_data(toHandle)
j = i + len(toHandle)
return j
class BeautifulSoup(BeautifulStoneSoup):
"""This parser knows the following facts about HTML:
* Some tags have no closing tag and should be interpreted as being
closed as soon as they are encountered.
* The text inside some tags (e.g. 'script') may contain tags which
are not really part of the document and which should be parsed
as text, not tags. If you want to parse the text as tags, you can
always fetch it and parse it explicitly.
* Tag nesting rules:
Most tags can't be nested at all. For instance, the occurrence of
a <p> tag should implicitly close the previous <p> tag.
<p>Para1<p>Para2
should be transformed into:
<p>Para1</p><p>Para2
Some tags can be nested arbitrarily. For instance, the occurrence
of a <blockquote> tag should _not_ implicitly close the previous
<blockquote> tag.
Alice said: <blockquote>Bob said: <blockquote>Blah
should NOT be transformed into:
Alice said: <blockquote>Bob said: </blockquote><blockquote>Blah
Some tags can be nested, but the nesting is reset by the
interposition of other tags. For instance, a <tr> tag should
implicitly close the previous <tr> tag within the same <table>,
but not close a <tr> tag in another table.
<table><tr>Blah<tr>Blah
should be transformed into:
<table><tr>Blah</tr><tr>Blah
but,
<tr>Blah<table><tr>Blah
should NOT be transformed into
<tr>Blah<table></tr><tr>Blah
Differing assumptions about tag nesting rules are a major source
of problems with the BeautifulSoup class. If BeautifulSoup is not
treating as nestable a tag your page author treats as nestable,
try ICantBelieveItsBeautifulSoup, MinimalSoup, or
BeautifulStoneSoup before writing your own subclass."""
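# Illustrative usage (assumed example): the nesting rules above mean a
# second <p> implicitly closes the first one.
#
#   >>> soup = BeautifulSoup("<p>Para1<p>Para2")
#   >>> len(soup.findAll('p'))
#   2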
def __init__(self, *args, **kwargs):
if not kwargs.has_key('smartQuotesTo'):
kwargs['smartQuotesTo'] = self.HTML_ENTITIES
kwargs['isHTML'] = True
BeautifulStoneSoup.__init__(self, *args, **kwargs)
SELF_CLOSING_TAGS = buildTagMap(None,
('br' , 'hr', 'input', 'img', 'meta',
'spacer', 'link', 'frame', 'base', 'col'))
PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea'])
QUOTE_TAGS = {'script' : None, 'textarea' : None}
#According to the HTML standard, each of these inline tags can
#contain another tag of the same type. Furthermore, it's common
#to actually use these tags this way.
NESTABLE_INLINE_TAGS = ('span', 'font', 'q', 'object', 'bdo', 'sub', 'sup',
'center')
#According to the HTML standard, these block tags can contain
#another tag of the same type. Furthermore, it's common
#to actually use these tags this way.
NESTABLE_BLOCK_TAGS = ('blockquote', 'div', 'fieldset', 'ins', 'del')
#Lists can contain other lists, but there are restrictions.
NESTABLE_LIST_TAGS = { 'ol' : [],
'ul' : [],
'li' : ['ul', 'ol'],
'dl' : [],
'dd' : ['dl'],
'dt' : ['dl'] }
#Tables can contain other tables, but there are restrictions.
NESTABLE_TABLE_TAGS = {'table' : [],
'tr' : ['table', 'tbody', 'tfoot', 'thead'],
'td' : ['tr'],
'th' : ['tr'],
'thead' : ['table'],
'tbody' : ['table'],
'tfoot' : ['table'],
}
NON_NESTABLE_BLOCK_TAGS = ('address', 'form', 'p', 'pre')
#If one of these tags is encountered, all tags up to the next tag of
#this type are popped.
RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript',
NON_NESTABLE_BLOCK_TAGS,
NESTABLE_LIST_TAGS,
NESTABLE_TABLE_TAGS)
NESTABLE_TAGS = buildTagMap([], NESTABLE_INLINE_TAGS, NESTABLE_BLOCK_TAGS,
NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS)
# Used to detect the charset in a META tag; see start_meta
CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M)
def start_meta(self, attrs):
"""Beautiful Soup can detect a charset included in a META tag,
try to convert the document to that charset, and re-parse the
document from the beginning."""
httpEquiv = None
contentType = None
contentTypeIndex = None
tagNeedsEncodingSubstitution = False
for i in range(0, len(attrs)):
key, value = attrs[i]
key = key.lower()
if key == 'http-equiv':
httpEquiv = value
elif key == 'content':
contentType = value
contentTypeIndex = i
if httpEquiv and contentType: # It's an interesting meta tag.
match = self.CHARSET_RE.search(contentType)
if match:
if (self.declaredHTMLEncoding is not None or
self.originalEncoding == self.fromEncoding):
# An HTML encoding was sniffed while converting
# the document to Unicode, or an HTML encoding was
# sniffed during a previous pass through the
# document, or an encoding was specified
# explicitly and it worked. Rewrite the meta tag.
def rewrite(match):
return match.group(1) + "%SOUP-ENCODING%"
newAttr = self.CHARSET_RE.sub(rewrite, contentType)
attrs[contentTypeIndex] = (attrs[contentTypeIndex][0],
newAttr)
tagNeedsEncodingSubstitution = True
else:
# This is our first pass through the document.
# Go through it again with the encoding information.
newCharset = match.group(3)
if newCharset and newCharset != self.originalEncoding:
self.declaredHTMLEncoding = newCharset
self._feed(self.declaredHTMLEncoding)
raise StopParsing
pass
tag = self.unknown_starttag("meta", attrs)
if tag and tagNeedsEncodingSubstitution:
tag.containsSubstitutions = True
class StopParsing(Exception):
pass
class ICantBelieveItsBeautifulSoup(BeautifulSoup):
"""The BeautifulSoup class is oriented towards skipping over
common HTML errors like unclosed tags. However, sometimes it makes
errors of its own. For instance, consider this fragment:
<b>Foo<b>Bar</b></b>
This is perfectly valid (if bizarre) HTML. However, the
BeautifulSoup class will implicitly close the first b tag when it
encounters the second 'b'. It will think the author wrote
"<b>Foo<b>Bar", and didn't close the first 'b' tag, because
there's no real-world reason to bold something that's already
bold. When it encounters '</b></b>' it will close two more 'b'
tags, for a grand total of three tags closed instead of two. This
can throw off the rest of your document structure. The same is
true of a number of other tags, listed below.
It's much more common for someone to forget to close a 'b' tag
than to actually use nested 'b' tags, and the BeautifulSoup class
handles the common case. This class handles the not-so-common
case: where you can't believe someone wrote what they did, but
it's valid HTML and BeautifulSoup screwed up by assuming it
wouldn't be."""
I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = \
('em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong',
'cite', 'code', 'dfn', 'kbd', 'samp', 'strong', 'var', 'b',
'big')
I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ('noscript',)
NESTABLE_TAGS = buildTagMap([], BeautifulSoup.NESTABLE_TAGS,
I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS,
I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS)
class MinimalSoup(BeautifulSoup):
"""The MinimalSoup class is for parsing HTML that contains
pathologically bad markup. It makes no assumptions about tag
nesting, but it does know which tags are self-closing, that
<script> tags contain Javascript and should not be parsed, that
META tags may contain encoding information, and so on.
This also makes it better for subclassing than BeautifulStoneSoup
or BeautifulSoup."""
RESET_NESTING_TAGS = buildTagMap('noscript')
NESTABLE_TAGS = {}
class BeautifulSOAP(BeautifulStoneSoup):
"""This class will push a tag with only a single string child into
the tag's parent as an attribute. The attribute's name is the tag
name, and the value is the string child. An example should give
the flavor of the change:
<foo><bar>baz</bar></foo>
=>
<foo bar="baz"><bar>baz</bar></foo>
You can then access fooTag['bar'] instead of fooTag.barTag.string.
This is, of course, useful for scraping structures that tend to
use subelements instead of attributes, such as SOAP messages. Note
that it modifies its input, so don't print the modified version
out.
I'm not sure how many people really want to use this class; let me
know if you do. Mainly I like the name."""
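# Illustrative usage, restating the docstring's own example:
#
#   >>> soup = BeautifulSOAP('<foo><bar>baz</bar></foo>')
#   >>> soup.fooTag['bar']
#   u'baz'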
def popTag(self):
if len(self.tagStack) > 1:
tag = self.tagStack[-1]
parent = self.tagStack[-2]
parent._getAttrMap()
if (isinstance(tag, Tag) and len(tag.contents) == 1 and
isinstance(tag.contents[0], NavigableString) and
not parent.attrMap.has_key(tag.name)):
parent[tag.name] = tag.contents[0]
BeautifulStoneSoup.popTag(self)
#Enterprise class names! It has come to our attention that some people
#think the names of the Beautiful Soup parser classes are too silly
#and "unprofessional" for use in enterprise screen-scraping. We feel
#your pain! For such-minded folk, the Beautiful Soup Consortium And
#All-Night Kosher Bakery recommends renaming this file to
#"RobustParser.py" (or, in cases of extreme enterprisiness,
#"RobustParserBeanInterface.class") and using the following
#enterprise-friendly class aliases:
class RobustXMLParser(BeautifulStoneSoup):
pass
class RobustHTMLParser(BeautifulSoup):
pass
class RobustWackAssHTMLParser(ICantBelieveItsBeautifulSoup):
pass
class RobustInsanelyWackAssHTMLParser(MinimalSoup):
pass
class SimplifyingSOAPParser(BeautifulSOAP):
pass
######################################################
#
# Bonus library: Unicode, Dammit
#
# This class forces XML data into a standard format (usually to UTF-8
# or Unicode). It is heavily based on code from Mark Pilgrim's
# Universal Feed Parser. It does not rewrite the XML or HTML to
# reflect a new encoding: that happens in BeautifulStoneSoup.handle_pi
# (XML) and BeautifulSoup.start_meta (HTML).
# Autodetects character encodings.
# Download from http://chardet.feedparser.org/
try:
import chardet
# import chardet.constants
# chardet.constants._debug = 1
except ImportError:
chardet = None
# cjkcodecs and iconv_codec make Python know about more character encodings.
# Both are available from http://cjkpython.i18n.org/
# They're built in if you use Python 2.4.
try:
import cjkcodecs.aliases
except ImportError:
pass
try:
import iconv_codec
except ImportError:
pass
class UnicodeDammit:
"""A class for detecting the encoding of a *ML document and
converting it to a Unicode string. If the source encoding is
windows-1252, can replace MS smart quotes with their HTML or XML
equivalents."""
# This dictionary maps commonly seen values for "charset" in HTML
# meta tags to the corresponding Python codec names. It only covers
# values that aren't in Python's aliases and can't be determined
# by the heuristics in find_codec.
CHARSET_ALIASES = { "macintosh" : "mac-roman",
"x-sjis" : "shift-jis" }
def __init__(self, markup, overrideEncodings=[],
smartQuotesTo='xml', isHTML=False):
self.declaredHTMLEncoding = None
self.markup, documentEncoding, sniffedEncoding = \
self._detectEncoding(markup, isHTML)
self.smartQuotesTo = smartQuotesTo
self.triedEncodings = []
if markup == '' or isinstance(markup, unicode):
self.originalEncoding = None
self.unicode = unicode(markup)
return
u = None
for proposedEncoding in overrideEncodings:
u = self._convertFrom(proposedEncoding)
if u: break
if not u:
for proposedEncoding in (documentEncoding, sniffedEncoding):
u = self._convertFrom(proposedEncoding)
if u: break
# If no luck and we have auto-detection library, try that:
if not u and chardet and not isinstance(self.markup, unicode):
u = self._convertFrom(chardet.detect(self.markup)['encoding'])
# As a last resort, try utf-8 and windows-1252:
if not u:
for proposed_encoding in ("utf-8", "windows-1252"):
u = self._convertFrom(proposed_encoding)
if u: break
self.unicode = u
if not u: self.originalEncoding = None
def _subMSChar(self, orig):
"""Changes a MS smart quote character to an XML or HTML
entity."""
sub = self.MS_CHARS.get(orig)
if isinstance(sub, tuple):
if self.smartQuotesTo == 'xml':
sub = '&#x%s;' % sub[1]
else:
sub = '&%s;' % sub[0]
return sub
def _convertFrom(self, proposed):
proposed = self.find_codec(proposed)
if not proposed or proposed in self.triedEncodings:
return None
self.triedEncodings.append(proposed)
markup = self.markup
# Convert smart quotes to HTML if coming from an encoding
# that might have them.
if self.smartQuotesTo and proposed.lower() in("windows-1252",
"iso-8859-1",
"iso-8859-2"):
markup = re.compile("([\x80-\x9f])").sub \
(lambda(x): self._subMSChar(x.group(1)),
markup)
try:
# print "Trying to convert document to %s" % proposed
u = self._toUnicode(markup, proposed)
self.markup = u
self.originalEncoding = proposed
except Exception, e:
# print "That didn't work!"
# print e
return None
#print "Correct encoding: %s" % proposed
return self.markup
def _toUnicode(self, data, encoding):
'''Given a string and its encoding, decodes the string into Unicode.
%encoding is a string recognized by encodings.aliases'''
# strip Byte Order Mark (if present)
if (len(data) >= 4) and (data[:2] == '\xfe\xff') \
and (data[2:4] != '\x00\x00'):
encoding = 'utf-16be'
data = data[2:]
elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \
and (data[2:4] != '\x00\x00'):
encoding = 'utf-16le'
data = data[2:]
elif data[:3] == '\xef\xbb\xbf':
encoding = 'utf-8'
data = data[3:]
elif data[:4] == '\x00\x00\xfe\xff':
encoding = 'utf-32be'
data = data[4:]
elif data[:4] == '\xff\xfe\x00\x00':
encoding = 'utf-32le'
data = data[4:]
newdata = unicode(data, encoding)
return newdata
def _detectEncoding(self, xml_data, isHTML=False):
"""Given a document, tries to detect its XML encoding."""
xml_encoding = sniffed_xml_encoding = None
try:
if xml_data[:4] == '\x4c\x6f\xa7\x94':
# EBCDIC
xml_data = self._ebcdic_to_ascii(xml_data)
elif xml_data[:4] == '\x00\x3c\x00\x3f':
# UTF-16BE
sniffed_xml_encoding = 'utf-16be'
xml_data = unicode(xml_data, 'utf-16be').encode('utf-8')
elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \
and (xml_data[2:4] != '\x00\x00'):
# UTF-16BE with BOM
sniffed_xml_encoding = 'utf-16be'
xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8')
elif xml_data[:4] == '\x3c\x00\x3f\x00':
# UTF-16LE
sniffed_xml_encoding = 'utf-16le'
xml_data = unicode(xml_data, 'utf-16le').encode('utf-8')
elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \
(xml_data[2:4] != '\x00\x00'):
# UTF-16LE with BOM
sniffed_xml_encoding = 'utf-16le'
xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8')
elif xml_data[:4] == '\x00\x00\x00\x3c':
# UTF-32BE
sniffed_xml_encoding = 'utf-32be'
xml_data = unicode(xml_data, 'utf-32be').encode('utf-8')
elif xml_data[:4] == '\x3c\x00\x00\x00':
# UTF-32LE
sniffed_xml_encoding = 'utf-32le'
xml_data = unicode(xml_data, 'utf-32le').encode('utf-8')
elif xml_data[:4] == '\x00\x00\xfe\xff':
# UTF-32BE with BOM
sniffed_xml_encoding = 'utf-32be'
xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8')
elif xml_data[:4] == '\xff\xfe\x00\x00':
# UTF-32LE with BOM
sniffed_xml_encoding = 'utf-32le'
xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8')
elif xml_data[:3] == '\xef\xbb\xbf':
# UTF-8 with BOM
sniffed_xml_encoding = 'utf-8'
xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8')
else:
sniffed_xml_encoding = 'ascii'
pass
except:
xml_encoding_match = None
xml_encoding_match = re.compile(
'^<\?.*encoding=[\'"](.*?)[\'"].*\?>').match(xml_data)
if not xml_encoding_match and isHTML:
regexp = re.compile('<\s*meta[^>]+charset=([^>]*?)[;\'">]', re.I)
xml_encoding_match = regexp.search(xml_data)
if xml_encoding_match is not None:
xml_encoding = xml_encoding_match.groups()[0].lower()
if isHTML:
self.declaredHTMLEncoding = xml_encoding
if sniffed_xml_encoding and \
(xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode',
'iso-10646-ucs-4', 'ucs-4', 'csucs4',
'utf-16', 'utf-32', 'utf_16', 'utf_32',
'utf16', 'u16')):
xml_encoding = sniffed_xml_encoding
return xml_data, xml_encoding, sniffed_xml_encoding
def find_codec(self, charset):
return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \
or (charset and self._codec(charset.replace("-", ""))) \
or (charset and self._codec(charset.replace("-", "_"))) \
or charset
def _codec(self, charset):
if not charset: return charset
codec = None
try:
codecs.lookup(charset)
codec = charset
except (LookupError, ValueError):
pass
return codec
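# Worked example (derived from CHARSET_ALIASES above): find_codec maps
# "macintosh" -> "mac-roman", which Python's codec registry knows, so
# find_codec("macintosh") returns "mac-roman".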
EBCDIC_TO_ASCII_MAP = None
def _ebcdic_to_ascii(self, s):
c = self.__class__
if not c.EBCDIC_TO_ASCII_MAP:
emap = (0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15,
16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31,
128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7,
144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26,
32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33,
38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94,
45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63,
186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34,
195,97,98,99,100,101,102,103,104,105,196,197,198,199,200,
201,202,106,107,108,109,110,111,112,113,114,203,204,205,
206,207,208,209,126,115,116,117,118,119,120,121,122,210,
211,212,213,214,215,216,217,218,219,220,221,222,223,224,
225,226,227,228,229,230,231,123,65,66,67,68,69,70,71,72,
73,232,233,234,235,236,237,125,74,75,76,77,78,79,80,81,
82,238,239,240,241,242,243,92,159,83,84,85,86,87,88,89,
90,244,245,246,247,248,249,48,49,50,51,52,53,54,55,56,57,
250,251,252,253,254,255)
import string
c.EBCDIC_TO_ASCII_MAP = string.maketrans( \
''.join(map(chr, range(256))), ''.join(map(chr, emap)))
return s.translate(c.EBCDIC_TO_ASCII_MAP)
MS_CHARS = { '\x80' : ('euro', '20AC'),
'\x81' : ' ',
'\x82' : ('sbquo', '201A'),
'\x83' : ('fnof', '192'),
'\x84' : ('bdquo', '201E'),
'\x85' : ('hellip', '2026'),
'\x86' : ('dagger', '2020'),
'\x87' : ('Dagger', '2021'),
'\x88' : ('circ', '2C6'),
'\x89' : ('permil', '2030'),
'\x8A' : ('Scaron', '160'),
'\x8B' : ('lsaquo', '2039'),
'\x8C' : ('OElig', '152'),
'\x8D' : '?',
'\x8E' : ('#x17D', '17D'),
'\x8F' : '?',
'\x90' : '?',
'\x91' : ('lsquo', '2018'),
'\x92' : ('rsquo', '2019'),
'\x93' : ('ldquo', '201C'),
'\x94' : ('rdquo', '201D'),
'\x95' : ('bull', '2022'),
'\x96' : ('ndash', '2013'),
'\x97' : ('mdash', '2014'),
'\x98' : ('tilde', '2DC'),
'\x99' : ('trade', '2122'),
'\x9a' : ('scaron', '161'),
'\x9b' : ('rsaquo', '203A'),
'\x9c' : ('oelig', '153'),
'\x9d' : '?',
'\x9e' : ('#x17E', '17E'),
'\x9f' : ('Yuml', ''),}
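# Worked example (derived from _subMSChar above): with
# smartQuotesTo='xml', '\x91' becomes '&#x2018;'; with
# smartQuotesTo='html', it becomes '&lsquo;'.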
#######################################################################
#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
import sys
soup = BeautifulSoup(sys.stdin)
print soup.prettify()
| agpl-3.0 | 8,123,461,818,184,523,000 | 38.46131 | 186 | 0.570518 | false |
broferek/ansible | test/units/modules/network/check_point/test_cp_mgmt_host_facts.py | 19 | 2820 | # Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_host_facts
OBJECT = {
"from": 1,
"to": 1,
"total": 6,
"objects": [
"53de74b7-8f19-4cbe-99fc-a81ef0759bad"
]
}
SHOW_PLURAL_PAYLOAD = {
'limit': 1,
'details_level': 'uid'
}
SHOW_SINGLE_PAYLOAD = {
'name': 'object_which_is_not_exist'
}
api_call_object = 'host'
api_call_object_plural_version = 'hosts'
failure_msg = '''{u'message': u'Requested object [object_which_is_not_exist] not found', u'code': u'generic_err_object_not_found'}'''
class TestCheckpointHostFacts(object):
module = cp_mgmt_host_facts
@pytest.fixture(autouse=True)
def module_mock(self, mocker):
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
@pytest.fixture
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible.module_utils.network.checkpoint.checkpoint.Connection')
return connection_class_mock.return_value
def test_show_single_object_which_is_not_exist(self, mocker, connection_mock):
connection_mock.send_request.return_value = (404, failure_msg)
try:
result = self._run_module(SHOW_SINGLE_PAYLOAD)
except Exception as e:
result = e.args[0]
assert result['failed']
assert 'Checkpoint device returned error 404 with message ' + failure_msg == result['msg']
def test_show_few_objects(self, mocker, connection_mock):
connection_mock.send_request.return_value = (200, OBJECT)
result = self._run_module(SHOW_PLURAL_PAYLOAD)
assert not result['changed']
assert OBJECT == result['ansible_facts'][api_call_object_plural_version]
def _run_module(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleExitJson) as ex:
self.module.main()
return ex.value.args[0]
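# Assumed invocation (these tests are collected by pytest):
#   pytest test/units/modules/network/check_point/test_cp_mgmt_host_facts.py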
| gpl-3.0 | 8,973,006,014,143,455,000 | 33.390244 | 133 | 0.694326 | false |
acutesoftware/AIKIF | aikif/.z_prototype/search.py | 1 | 1453 | # -*- coding: utf-8 -*-
import argparse
import aikif.config as cfg
def search(search_string):
"""
main function to search using indexes
"""
print('Searching for ' + search_string)
ndxFiles = cfg.params['index_files']
numResults = 0
totLines = 0
for fname in ndxFiles:
print("Searching " + fname)
with open(fname, 'r') as f:
line_num = 0
for line_num, line in enumerate(f):
totLines = totLines + 1
if search_string in line:
try:
print(line) # gives error with some encoding
except Exception:
print("Cant print search result")
numResults = numResults + 1
print(str(line_num) + " lines searched")
print('Found', str(numResults), 'results in', str(totLines), 'lines over', str(len(ndxFiles)), 'index files')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Search.py looks in AIKIF index files for strings')
parser.add_argument('-s', '--search', help='enter a search string, enclosed with quotes if multiple words needed')
parser.add_argument('-i', '--index', help='choose an index file to search')
args = parser.parse_args()
search(args.search.strip(''))
print("REMEMBER - call this with python otherwise it doesnt run\n python search.py -s database\n")
| gpl-3.0 | 8,887,818,560,919,525,000 | 38.297297 | 118 | 0.582244 | false |
William-Hai/volatility | volatility/scan.py | 44 | 9086 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# Derived from source in PyFlag developed by:
# Copyright 2004: Commonwealth of Australia.
# Michael Cohen <[email protected]>
# David Collett <[email protected]>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
# Special thanks to Michael Cohen for ideas and comments!
#
#pylint: disable-msg=C0111
"""
@author: AAron Walters
@license: GNU General Public License 2.0
@contact: [email protected]
@organization: Volatility Foundation
"""
import volatility.debug as debug
import volatility.registry as registry
import volatility.addrspace as addrspace
import volatility.constants as constants
import volatility.conf as conf
########### Following is the new implementation of the scanning
########### framework. The old framework was based on PyFlag's
########### scanning framework which is probably too complex for this.
class BaseScanner(object):
""" A more thorough scanner which checks every byte """
checks = []
def __init__(self, window_size = 8):
self.buffer = addrspace.BufferAddressSpace(conf.DummyConfig(), data = '\x00' * 1024)
self.window_size = window_size
self.constraints = []
self.error_count = 0
def check_addr(self, found):
""" This calls all our constraints on the offset found and
returns the number of contraints that matched.
We shortcut the loop as soon as its obvious that there will
not be sufficient matches to fit the criteria. This allows for
an early exit and a speed boost.
"""
cnt = 0
for check in self.constraints:
## constraints can raise for an error
try:
val = check.check(found)
except Exception:
debug.b()
val = False
if not val:
cnt = cnt + 1
if cnt > self.error_count:
return False
return True
overlap = 20
def scan(self, address_space, offset = 0, maxlen = None):
self.buffer.profile = address_space.profile
current_offset = offset
## Build our constraints from the specified ScannerCheck
## classes:
self.constraints = []
for class_name, args in self.checks:
check = registry.get_plugin_classes(ScannerCheck)[class_name](self.buffer, **args)
self.constraints.append(check)
## Which checks also have skippers?
skippers = [ c for c in self.constraints if hasattr(c, "skip") ]
for (range_start, range_size) in sorted(address_space.get_available_addresses()):
# Jump to the next available point to scan from
# self.base_offset jumps up to be at least range_start
current_offset = max(range_start, current_offset)
range_end = range_start + range_size
# If we have a maximum length, we make sure it's less than the range_end
if maxlen:
range_end = min(range_end, offset + maxlen)
while (current_offset < range_end):
# We've now got range_start <= self.base_offset < range_end
# Figure out how much data to read
l = min(constants.SCAN_BLOCKSIZE + self.overlap, range_end - current_offset)
# Populate the buffer with data
# We use zread to scan what we can because there are often invalid
# pages in the DTB
data = address_space.zread(current_offset, l)
self.buffer.assign_buffer(data, current_offset)
## Run checks throughout this block of data
i = 0
while i < l:
if self.check_addr(i + current_offset):
## yield the offset to the start of the memory
## (after the pool tag)
yield i + current_offset
## Where should we go next? By default we go 1 byte
## ahead, but if some of the checkers have skippers,
## we may actually go much farther. Checkers with
## skippers basically tell us that there is no way
## they can match anything before the skipped result,
## so there is no point in trying them on all the data
## in between. This optimization is useful to really
## speed things up. FIXME - currently skippers assume
## that the check must match, therefore we can skip
## the unmatchable region, but its possible that a
## scanner needs to match only some checkers.
skip = 1
for s in skippers:
skip = max(skip, s.skip(data, i))
i += skip
current_offset += min(constants.SCAN_BLOCKSIZE, l)
class DiscontigScanner(BaseScanner):
def scan(self, address_space, offset = 0, maxlen = None):
debug.warning("DiscontigScanner has been deprecated, all functionality is now contained in BaseScanner")
for match in BaseScanner.scan(self, address_space, offset, maxlen):
yield match
class ScannerCheck(object):
""" A scanner check is a special class which is invoked on an AS to check for a specific condition.
The main method is def check(self, offset):
This will return True if the condition is true or False otherwise.
This class is the base class for all checks.
"""
def __init__(self, address_space, **_kwargs):
self.address_space = address_space
def object_offset(self, offset, address_space):
return offset
def check(self, _offset):
return False
## If you want to speed up the scanning define this method - it
## will be used to skip the data which is obviously not going to
## match. You will need to return the number of bytes from offset
## to skip to. We take the maximum number of bytes to guarantee
## that all checks have a chance of passing.
#def skip(self, data, offset):
# return -1
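# A minimal sketch of a concrete check (hypothetical, for illustration
# only; real checks live in the plugins). The skip() contract is the
# relative distance to advance, as used in BaseScanner.scan above.
#
# class ExampleTagCheck(ScannerCheck):
#     """Passes when the four bytes at `offset` equal a fixed tag."""
#     def check(self, offset):
#         return self.address_space.read(offset, 4) == "Tag_"
#     def skip(self, data, offset):
#         next_match = data.find("Tag_", offset + 1)
#         if next_match == -1:
#             return len(data) - offset      # nothing left in this buffer
#         return next_match - offset         # jump to the next candidate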
class PoolScanner(BaseScanner):
def object_offset(self, found, address_space):
"""
The name of this function "object_offset" can be misleading depending
on how it's used. Even before removing the preambles (r1324), it may not
always return the offset of an object. Here are the rules:
If you subclass PoolScanner and do not override this function, it
will return the offset of _POOL_HEADER. If you do override this function,
it should be used to calculate and return the offset of your desired
object within the pool. Thus there are two different ways it can be done.
Example 1.
For an example of subclassing PoolScanner and not overriding this function,
see filescan.PoolScanFile. In this case, the plugin (filescan.FileScan)
treats the offset returned by this function as the start of _POOL_HEADER
and then works out the object from the bottom up:
for offset in PoolScanFile().scan(address_space):
pool_obj = obj.Object("_POOL_HEADER", vm = address_space,
offset = offset)
##
## Work out objects base here
##
Example 2.
For an example of subclassing PoolScanner and overriding this function,
see filescan.PoolScanProcess. In this case, the "work" described above is
done here (in the subclassed object_offset). Thus in the plugin (filescan.PSScan)
it can directly instantiate _EPROCESS from the offset we return.
for offset in PoolScanProcess().scan(address_space):
eprocess = obj.Object('_EPROCESS', vm = address_space,
native_vm = kernel_as, offset = offset)
"""
## Subtract the offset of the PoolTag member to get the start
## of _POOL_HEADER. This is done because PoolScanners search
## for the PoolTag.
return found - self.buffer.profile.get_obj_offset('_POOL_HEADER', 'PoolTag')
def scan(self, address_space, offset = 0, maxlen = None):
for i in BaseScanner.scan(self, address_space, offset, maxlen):
yield self.object_offset(i, address_space)
| gpl-2.0 | 8,870,765,082,792,051,000 | 40.113122 | 112 | 0.623817 | false |
magdamagda/geny-chorobowe | geny_chorobowe/find_disease_genes/migrations/0010_auto_20160106_0951.py | 1 | 1184 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-06 09:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('find_disease_genes', '0009_auto_20160105_2120'),
]
operations = [
migrations.CreateModel(
name='MedgenConcept',
fields=[
('ConceptID', models.CharField(max_length=10, primary_key=True, serialize=False)),
('Name', models.CharField(max_length=200)),
('Def', models.CharField(max_length=300)),
('Source', models.CharField(max_length=50)),
('RelatedConcepts', models.ManyToManyField(blank=True, null=True, related_name='_medgenconcept_RelatedConcepts_+', to='find_disease_genes.MedgenConcept')),
],
),
migrations.RenameField(
model_name='clinvardisease',
old_name='SourceID',
new_name='Source',
),
migrations.AlterField(
model_name='clinvarsource',
name='SourceName',
field=models.CharField(max_length=50),
),
]
| gpl-2.0 | 4,839,723,874,064,484,000 | 32.828571 | 171 | 0.576014 | false |
tanyunshi/python-docx | features/steps/table.py | 6 | 12947 | # encoding: utf-8
"""
Step implementations for table-related features
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from behave import given, then, when
from docx import Document
from docx.enum.table import WD_TABLE_ALIGNMENT, WD_TABLE_DIRECTION
from docx.shared import Inches
from docx.table import _Column, _Columns, _Row, _Rows
from helpers import test_docx
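# These step implementations bind to Gherkin lines in the .feature
# files; an assumed illustrative scenario fragment:
#
#   Given a 2 x 2 table
#   When I add a row to the table
#   Then the table has 3 rows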
# given ===================================================
@given('a 2 x 2 table')
def given_a_2x2_table(context):
context.table_ = Document().add_table(rows=2, cols=2)
@given('a 3x3 table having {span_state}')
def given_a_3x3_table_having_span_state(context, span_state):
table_idx = {
'only uniform cells': 0,
'a horizontal span': 1,
'a vertical span': 2,
'a combined span': 3,
}[span_state]
document = Document(test_docx('tbl-cell-access'))
context.table_ = document.tables[table_idx]
@given('a column collection having two columns')
def given_a_column_collection_having_two_columns(context):
docx_path = test_docx('blk-containing-table')
document = Document(docx_path)
context.columns = document.tables[0].columns
@given('a row collection having two rows')
def given_a_row_collection_having_two_rows(context):
docx_path = test_docx('blk-containing-table')
document = Document(docx_path)
context.rows = document.tables[0].rows
@given('a table')
def given_a_table(context):
context.table_ = Document().add_table(rows=2, cols=2)
@given('a table cell having a width of {width}')
def given_a_table_cell_having_a_width_of_width(context, width):
table_idx = {'no explicit setting': 0, '1 inch': 1, '2 inches': 2}[width]
document = Document(test_docx('tbl-props'))
table = document.tables[table_idx]
cell = table.cell(0, 0)
context.cell = cell
@given('a table column having a width of {width_desc}')
def given_a_table_having_a_width_of_width_desc(context, width_desc):
col_idx = {
'no explicit setting': 0,
'1440': 1,
}[width_desc]
docx_path = test_docx('tbl-col-props')
document = Document(docx_path)
context.column = document.tables[0].columns[col_idx]
@given('a table having {alignment} alignment')
def given_a_table_having_alignment_alignment(context, alignment):
table_idx = {
'inherited': 3,
'left': 4,
'right': 5,
'center': 6,
}[alignment]
docx_path = test_docx('tbl-props')
document = Document(docx_path)
context.table_ = document.tables[table_idx]
@given('a table having an autofit layout of {autofit}')
def given_a_table_having_an_autofit_layout_of_autofit(context, autofit):
tbl_idx = {
'no explicit setting': 0,
'autofit': 1,
'fixed': 2,
}[autofit]
document = Document(test_docx('tbl-props'))
context.table_ = document.tables[tbl_idx]
@given('a table having {style} style')
def given_a_table_having_style(context, style):
table_idx = {
'no explicit': 0,
'Table Grid': 1,
'Light Shading - Accent 1': 2,
}[style]
document = Document(test_docx('tbl-having-applied-style'))
context.document = document
context.table_ = document.tables[table_idx]
@given('a table having table direction set {setting}')
def given_a_table_having_table_direction_setting(context, setting):
table_idx = [
'to inherit',
'right-to-left',
'left-to-right'
].index(setting)
document = Document(test_docx('tbl-on-off-props'))
context.table_ = document.tables[table_idx]
@given('a table having two columns')
def given_a_table_having_two_columns(context):
docx_path = test_docx('blk-containing-table')
document = Document(docx_path)
# context.table is used internally by behave, underscore added
# to distinguish this one
context.table_ = document.tables[0]
@given('a table having two rows')
def given_a_table_having_two_rows(context):
docx_path = test_docx('blk-containing-table')
document = Document(docx_path)
context.table_ = document.tables[0]
# when =====================================================
@when('I add a 1.0 inch column to the table')
def when_I_add_a_1_inch_column_to_table(context):
context.column = context.table_.add_column(Inches(1.0))
@when('I add a row to the table')
def when_add_row_to_table(context):
table = context.table_
context.row = table.add_row()
@when('I assign {value_str} to table.alignment')
def when_I_assign_value_to_table_alignment(context, value_str):
value = {
'None': None,
'WD_TABLE_ALIGNMENT.LEFT': WD_TABLE_ALIGNMENT.LEFT,
'WD_TABLE_ALIGNMENT.RIGHT': WD_TABLE_ALIGNMENT.RIGHT,
'WD_TABLE_ALIGNMENT.CENTER': WD_TABLE_ALIGNMENT.CENTER,
}[value_str]
table = context.table_
table.alignment = value
@when('I assign {value} to table.style')
def when_apply_value_to_table_style(context, value):
table, styles = context.table_, context.document.styles
if value == 'None':
new_value = None
elif value.startswith('styles['):
new_value = styles[value.split('\'')[1]]
else:
new_value = styles[value]
table.style = new_value
@when('I assign {value} to table.table_direction')
def when_assign_value_to_table_table_direction(context, value):
new_value = (
None if value == 'None' else getattr(WD_TABLE_DIRECTION, value)
)
context.table_.table_direction = new_value
@when('I merge from cell {origin} to cell {other}')
def when_I_merge_from_cell_origin_to_cell_other(context, origin, other):
def cell(table, idx):
row, col = idx // 3, idx % 3
return table.cell(row, col)
a_idx, b_idx = int(origin) - 1, int(other) - 1
table = context.table_
a, b = cell(table, a_idx), cell(table, b_idx)
a.merge(b)
@when('I set the cell width to {width}')
def when_I_set_the_cell_width_to_width(context, width):
new_value = {'1 inch': Inches(1)}[width]
context.cell.width = new_value
@when('I set the column width to {width_emu}')
def when_I_set_the_column_width_to_width_emu(context, width_emu):
new_value = None if width_emu == 'None' else int(width_emu)
context.column.width = new_value
@when('I set the table autofit to {setting}')
def when_I_set_the_table_autofit_to_setting(context, setting):
new_value = {'autofit': True, 'fixed': False}[setting]
table = context.table_
table.autofit = new_value
# then =====================================================
@then('I can access a collection column by index')
def then_can_access_collection_column_by_index(context):
columns = context.columns
for idx in range(2):
column = columns[idx]
assert isinstance(column, _Column)
@then('I can access a collection row by index')
def then_can_access_collection_row_by_index(context):
rows = context.rows
for idx in range(2):
row = rows[idx]
assert isinstance(row, _Row)
@then('I can access the column collection of the table')
def then_can_access_column_collection_of_table(context):
table = context.table_
columns = table.columns
assert isinstance(columns, _Columns)
@then('I can access the row collection of the table')
def then_can_access_row_collection_of_table(context):
table = context.table_
rows = table.rows
assert isinstance(rows, _Rows)
@then('I can iterate over the column collection')
def then_can_iterate_over_column_collection(context):
columns = context.columns
actual_count = 0
for column in columns:
actual_count += 1
assert isinstance(column, _Column)
assert actual_count == 2
@then('I can iterate over the row collection')
def then_can_iterate_over_row_collection(context):
rows = context.rows
actual_count = 0
for row in rows:
actual_count += 1
assert isinstance(row, _Row)
assert actual_count == 2
@then('table.alignment is {value_str}')
def then_table_alignment_is_value(context, value_str):
value = {
'None': None,
'WD_TABLE_ALIGNMENT.LEFT': WD_TABLE_ALIGNMENT.LEFT,
'WD_TABLE_ALIGNMENT.RIGHT': WD_TABLE_ALIGNMENT.RIGHT,
'WD_TABLE_ALIGNMENT.CENTER': WD_TABLE_ALIGNMENT.CENTER,
}[value_str]
table = context.table_
assert table.alignment == value, 'got %s' % table.alignment
@then('table.cell({row}, {col}).text is {expected_text}')
def then_table_cell_row_col_text_is_text(context, row, col, expected_text):
table = context.table_
row_idx, col_idx = int(row), int(col)
cell_text = table.cell(row_idx, col_idx).text
assert cell_text == expected_text, 'got %s' % cell_text
@then('table.style is styles[\'{style_name}\']')
def then_table_style_is_styles_style_name(context, style_name):
table, styles = context.table_, context.document.styles
expected_style = styles[style_name]
assert table.style == expected_style, "got '%s'" % table.style
@then('table.table_direction is {value}')
def then_table_table_direction_is_value(context, value):
expected_value = (
None if value == 'None' else getattr(WD_TABLE_DIRECTION, value)
)
actual_value = context.table_.table_direction
assert actual_value == expected_value, "got '%s'" % actual_value
@then('the column cells text is {expected_text}')
def then_the_column_cells_text_is_expected_text(context, expected_text):
table = context.table_
cells_text = ' '.join(c.text for col in table.columns for c in col.cells)
assert cells_text == expected_text, 'got %s' % cells_text
@then('the length of the column collection is 2')
def then_len_of_column_collection_is_2(context):
columns = context.table_.columns
assert len(columns) == 2
@then('the length of the row collection is 2')
def then_len_of_row_collection_is_2(context):
rows = context.table_.rows
assert len(rows) == 2
@then('the new column has 2 cells')
def then_new_column_has_2_cells(context):
assert len(context.column.cells) == 2
@then('the new column is 1.0 inches wide')
def then_new_column_is_1_inches_wide(context):
assert context.column.width == Inches(1)
@then('the new row has 2 cells')
def then_new_row_has_2_cells(context):
assert len(context.row.cells) == 2
@then('the reported autofit setting is {autofit}')
def then_the_reported_autofit_setting_is_autofit(context, autofit):
expected_value = {'autofit': True, 'fixed': False}[autofit]
table = context.table_
assert table.autofit is expected_value
@then('the reported column width is {width_emu}')
def then_the_reported_column_width_is_width_emu(context, width_emu):
expected_value = None if width_emu == 'None' else int(width_emu)
assert context.column.width == expected_value, (
'got %s' % context.column.width
)
@then('the reported width of the cell is {width}')
def then_the_reported_width_of_the_cell_is_width(context, width):
expected_width = {'None': None, '1 inch': Inches(1)}[width]
actual_width = context.cell.width
assert actual_width == expected_width, (
'expected %s, got %s' % (expected_width, actual_width)
)
@then('the row cells text is {encoded_text}')
def then_the_row_cells_text_is_expected_text(context, encoded_text):
expected_text = encoded_text.replace('\\', '\n')
table = context.table_
cells_text = ' '.join(c.text for row in table.rows for c in row.cells)
assert cells_text == expected_text, 'got %s' % cells_text
@then('the table has {count} columns')
def then_table_has_count_columns(context, count):
column_count = int(count)
columns = context.table_.columns
assert len(columns) == column_count
@then('the table has {count} rows')
def then_table_has_count_rows(context, count):
row_count = int(count)
rows = context.table_.rows
assert len(rows) == row_count
@then('the width of cell {n_str} is {inches_str} inches')
def then_the_width_of_cell_n_is_x_inches(context, n_str, inches_str):
def _cell(table, idx):
row, col = idx // 3, idx % 3
return table.cell(row, col)
idx, inches = int(n_str) - 1, float(inches_str)
cell = _cell(context.table_, idx)
assert cell.width == Inches(inches), 'got %s' % cell.width.inches
@then('the width of each cell is {inches} inches')
def then_the_width_of_each_cell_is_inches(context, inches):
table = context.table_
expected_width = Inches(float(inches))
for cell in table._cells:
assert cell.width == expected_width, 'got %s' % cell.width.inches
@then('the width of each column is {inches} inches')
def then_the_width_of_each_column_is_inches(context, inches):
table = context.table_
expected_width = Inches(float(inches))
for column in table.columns:
assert column.width == expected_width, 'got %s' % column.width.inches
| mit | -1,849,273,605,797,733,400 | 30.732843 | 77 | 0.653974 | false |
nop33/indico | indico/modules/events/registration/placeholders/invitations.py | 2 | 1675 | # This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from markupsafe import Markup
from indico.util.i18n import _
from indico.util.placeholders import Placeholder
from indico.web.flask.util import url_for
class FirstNamePlaceholder(Placeholder):
name = 'first_name'
description = _("First name of the person")
@classmethod
def render(cls, invitation):
return invitation.first_name
class LastNamePlaceholder(Placeholder):
name = 'last_name'
description = _("Last name of the person")
@classmethod
def render(cls, invitation):
return invitation.last_name
class InvitationLinkPlaceholder(Placeholder):
name = 'invitation_link'
description = _("Link to accept/decline the invitation")
required = True
@classmethod
def render(cls, invitation):
url = url_for('.display_regform', invitation.locator.uuid, _external=True)
return Markup('<a href="{0}">{0}</a>'.format(url))
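# Worked example (illustrative): render() fills both slots of
# '<a href="{0}">{0}</a>' with the invitation's registration-form URL,
# so the link text is the URL itself.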
| gpl-3.0 | 6,436,381,824,222,359,000 | 31.211538 | 82 | 0.725373 | false |
Taapat/enigma2-openpli-vuplus | lib/python/Components/Sources/Boolean.py | 162 | 1264 | from Source import Source
from Components.Element import cached
from enigma import eTimer
# a small warning:
# you can use this Boolean source to express screen-private
# conditional expressions.
#
# however, if you think that there is ANY interest that another
# screen could use your expression, please put your calculation
# into a separate Source, providing a "boolean"-property.
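# Illustrative sketch (assumed example; the config path is
# hypothetical): feeding a screen-private expression straight in.
#
#   self["clock"] = Boolean(function=lambda: config.usage.show_clock.value)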
class Boolean(Source, object):
def __init__(self, fixed = False, function = None, destroy = None, poll = 0):
Source.__init__(self)
self.function = function
self.fixed = fixed
self.post_destroy = destroy
if poll > 0:
self.poll_timer = eTimer()
self.poll_timer.callback.append(self.poll)
self.poll_timer.start(poll)
else:
self.poll_timer = None
@cached
def getBoolean(self):
if self.function is not None:
return self.function()
else:
return self.fixed
def setBoolean(self, value):
assert self.function is None
self.fixed = value
self.poll()
boolean = property(getBoolean, setBoolean)
def poll(self):
self.changed((self.CHANGED_ALL,))
def destroy(self):
if self.poll_timer:
self.poll_timer.callback.remove(self.poll)
if self.post_destroy is not None:
self.fixed = self.post_destroy
self.poll()
Source.destroy(self)
| gpl-2.0 | 5,022,945,174,676,182,000 | 25.333333 | 78 | 0.721519 | false |
getnikola/plugins | v7/link_figure/link_figure.py | 2 | 4919 | # -*- coding: utf-8 -*-
# Copyright © 2014 Ivan Teoh and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from nikola.plugin_categories import RestExtension
class Plugin(RestExtension):
name = "link_figure"
def set_site(self, site):
self.site = site
directives.register_directive('link_figure', LinkFigure)
return super(Plugin, self).set_site(site)
CODE_URL_BASIC = (u"""<a class="{classes}"
href="{url}"
title="{description}">
{title}
</a>""")
CODE_IMAGE = (u"""<div class="link-figure-media">
<a class="link-figure-image" href="{url}" target="_blank">
<img src="{image_url}" alt="{title}" />
</a>
</div>""")
CODE_DESCRIPTION = (u"""<p class="link-figure-description">
{description}
</p>""")
CODE_AUTHOR = (u"""<p class="link-figure-author">
{author_by}{author}
</p>""")
CODE_AUTHOR_URL = (u"""<p class="link-figure-author">
{author_by}<a href="{author_url}" target="_blank">
{author}
</a></p>""")
CODE_URL = (u"""<div class="link-figure-content">
<a class="link-figure-title" href="{url}" target="_blank">{title}</a>
{description}
{author}
</div>""")
CODE = (u"""<div class="{classes}">
{image_url}
{url}
</div>""")
class LinkFigure(Directive):
""" Restructured text extension for inserting link figure
Usage:
.. link_figure:: url
:title: url title
:description: url description
:class: class name
:image_url: url image
:author: url domain or author
:author_url: author url
:author_by: author by symbol
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {
'title': directives.unchanged,
'description': directives.unchanged,
'class': directives.unchanged,
'image_url': directives.unchanged,
'author': directives.unchanged,
'author_url': directives.unchanged,
'author_by': directives.unchanged,
}
def run(self):
""" Required by the Directive interface. Create docutils nodes """
options = {
'url': self.arguments[0],
'title': self.options.get('title', ''),
'description': self.options.get('description', ''),
'classes': self.options.get('class', ''),
'image_url': self.options.get('image_url', ''),
'author': self.options.get('author', ''),
'author_url': self.options.get('author_url', ''),
'author_by': self.options.get('author_by', ''),
}
if not options['title']:
if options['url'].endswith('/'):
options['title'] = options['url'][:-1]
options['title'] = options['title'].split('/')[-1]
options['title'] = options['title'].split('?')[0]
if not options['description']:
options['description'] = options['title']
return [nodes.raw('', CODE_URL_BASIC.format(**options), format='html')]
if options['image_url']:
options['image_url'] = CODE_IMAGE.format(**options)
if options['author_by']:
options['author_by'] = options['author_by'].strip() + ' '
if options['author'] and options['author_url']:
options['author'] = CODE_AUTHOR_URL.format(**options)
elif options['author']:
options['author'] = CODE_AUTHOR.format(**options)
if options['description']:
options['description'] = CODE_DESCRIPTION.format(**options)
options['url'] = CODE_URL.format(**options)
return [nodes.raw('', CODE.format(**options), format='html')]
def assert_has_content(self):
""" LinkFigure has no content, override check from superclass """
pass
| mit | 8,292,915,414,803,617,000 | 33.152778 | 83 | 0.618341 | false |
abhiii5459/sympy | sympy/parsing/sympy_parser.py | 43 | 30008 | """Transform a string with Python-like source code into SymPy expression. """
from __future__ import print_function, division
from .sympy_tokenize import \
generate_tokens, untokenize, TokenError, \
NUMBER, STRING, NAME, OP, ENDMARKER
from keyword import iskeyword
import ast
import re
import unicodedata
import sympy
from sympy.core.compatibility import exec_, StringIO
from sympy.core.basic import Basic
_re_repeated = re.compile(r"^(\d*)\.(\d*)\[(\d+)\]$")
def _token_splittable(token):
"""
Predicate for whether a token name can be split into multiple tokens.
A token is splittable if it does not contain an underscore character and
it is not the name of a Greek letter. This is used to implicitly convert
expressions like 'xyz' into 'x*y*z'.
"""
if '_' in token:
return False
else:
try:
return not unicodedata.lookup('GREEK SMALL LETTER ' + token)
except KeyError:
pass
if len(token) > 1:
return True
return False
def _token_callable(token, local_dict, global_dict, nextToken=None):
"""
Predicate for whether a token name represents a callable function.
Essentially wraps ``callable``, but looks up the token name in the
locals and globals.
"""
func = local_dict.get(token[1])
if not func:
func = global_dict.get(token[1])
return callable(func) and not isinstance(func, sympy.Symbol)
def _add_factorial_tokens(name, result):
if result == [] or result[-1][1] == '(':
raise TokenError()
beginning = [(NAME, name), (OP, '(')]
end = [(OP, ')')]
diff = 0
length = len(result)
for index, token in enumerate(result[::-1]):
toknum, tokval = token
i = length - index - 1
if tokval == ')':
diff += 1
elif tokval == '(':
diff -= 1
if diff == 0:
if i - 1 >= 0 and result[i - 1][0] == NAME:
return result[:i - 1] + beginning + result[i - 1:] + end
else:
return result[:i] + beginning + result[i:] + end
return result
class AppliedFunction(object):
"""
A group of tokens representing a function and its arguments.
`exponent` is for handling the shorthand sin^2, ln^2, etc.
"""
def __init__(self, function, args, exponent=None):
if exponent is None:
exponent = []
self.function = function
self.args = args
self.exponent = exponent
self.items = ['function', 'args', 'exponent']
def expand(self):
"""Return a list of tokens representing the function"""
result = []
result.append(self.function)
result.extend(self.args)
return result
def __getitem__(self, index):
return getattr(self, self.items[index])
def __repr__(self):
return "AppliedFunction(%s, %s, %s)" % (self.function, self.args,
self.exponent)
class ParenthesisGroup(list):
"""List of tokens representing an expression in parentheses."""
pass
def _flatten(result):
result2 = []
for tok in result:
if isinstance(tok, AppliedFunction):
result2.extend(tok.expand())
else:
result2.append(tok)
return result2
def _group_parentheses(recursor):
def _inner(tokens, local_dict, global_dict):
"""Group tokens between parentheses with ParenthesisGroup.
Also processes those tokens recursively.
"""
result = []
stacks = []
stacklevel = 0
for token in tokens:
if token[0] == OP:
if token[1] == '(':
stacks.append(ParenthesisGroup([]))
stacklevel += 1
elif token[1] == ')':
stacks[-1].append(token)
stack = stacks.pop()
if len(stacks) > 0:
# We don't recurse here since the upper-level stack
# would reprocess these tokens
stacks[-1].extend(stack)
else:
# Recurse here to handle nested parentheses
# Strip off the outer parentheses to avoid an infinite loop
inner = stack[1:-1]
inner = recursor(inner,
local_dict,
global_dict)
parenGroup = [stack[0]] + inner + [stack[-1]]
result.append(ParenthesisGroup(parenGroup))
stacklevel -= 1
continue
if stacklevel:
stacks[-1].append(token)
else:
result.append(token)
if stacklevel:
raise TokenError("Mismatched parentheses")
return result
return _inner
def _apply_functions(tokens, local_dict, global_dict):
"""Convert a NAME token + ParenthesisGroup into an AppliedFunction.
Note that ParenthesisGroups, if not applied to any function, are
converted back into lists of tokens.
"""
result = []
symbol = None
for tok in tokens:
if tok[0] == NAME:
symbol = tok
result.append(tok)
elif isinstance(tok, ParenthesisGroup):
if symbol and _token_callable(symbol, local_dict, global_dict):
result[-1] = AppliedFunction(symbol, tok)
symbol = None
else:
result.extend(tok)
else:
symbol = None
result.append(tok)
return result
def _implicit_multiplication(tokens, local_dict, global_dict):
"""Implicitly adds '*' tokens.
Cases:
- Two AppliedFunctions next to each other ("sin(x)cos(x)")
- AppliedFunction next to an open parenthesis ("sin x (cos x + 1)")
- A close parenthesis next to an AppliedFunction ("(x+2)sin x")\
- A close parenthesis next to an open parenthesis ("(x+2)(x+3)")
- AppliedFunction next to an implicitly applied function ("sin(x)cos x")
"""
result = []
for tok, nextTok in zip(tokens, tokens[1:]):
result.append(tok)
if (isinstance(tok, AppliedFunction) and
isinstance(nextTok, AppliedFunction)):
result.append((OP, '*'))
elif (isinstance(tok, AppliedFunction) and
nextTok[0] == OP and nextTok[1] == '('):
# Applied function followed by an open parenthesis
result.append((OP, '*'))
elif (tok[0] == OP and tok[1] == ')' and
isinstance(nextTok, AppliedFunction)):
# Close parenthesis followed by an applied function
result.append((OP, '*'))
elif (tok[0] == OP and tok[1] == ')' and
nextTok[0] == NAME):
# Close parenthesis followed by an implicitly applied function
result.append((OP, '*'))
elif (tok[0] == nextTok[0] == OP
and tok[1] == ')' and nextTok[1] == '('):
# Close parenthesis followed by an open parenthesis
result.append((OP, '*'))
elif (isinstance(tok, AppliedFunction) and nextTok[0] == NAME):
# Applied function followed by implicitly applied function
result.append((OP, '*'))
elif (tok[0] == NAME and
not _token_callable(tok, local_dict, global_dict) and
nextTok[0] == OP and nextTok[1] == '('):
# Constant followed by parenthesis
result.append((OP, '*'))
elif (tok[0] == NAME and
not _token_callable(tok, local_dict, global_dict) and
nextTok[0] == NAME and
not _token_callable(nextTok, local_dict, global_dict)):
# Constant followed by constant
result.append((OP, '*'))
elif (tok[0] == NAME and
not _token_callable(tok, local_dict, global_dict) and
(isinstance(nextTok, AppliedFunction) or nextTok[0] == NAME)):
# Constant followed by (implicitly applied) function
result.append((OP, '*'))
if tokens:
result.append(tokens[-1])
return result
def _implicit_application(tokens, local_dict, global_dict):
"""Adds parentheses as needed after functions."""
result = []
appendParen = 0 # number of closing parentheses to add
skip = 0 # number of tokens to delay before adding a ')' (to
# capture **, ^, etc.)
exponentSkip = False # skipping tokens before inserting parentheses to
# work with function exponentiation
for tok, nextTok in zip(tokens, tokens[1:]):
result.append(tok)
if (tok[0] == NAME and
nextTok[0] != OP and
nextTok[0] != ENDMARKER):
if _token_callable(tok, local_dict, global_dict, nextTok):
result.append((OP, '('))
appendParen += 1
# name followed by exponent - function exponentiation
elif (tok[0] == NAME and nextTok[0] == OP and nextTok[1] == '**'):
if _token_callable(tok, local_dict, global_dict):
exponentSkip = True
elif exponentSkip:
# if the last token added was an applied function (i.e. the
# power of the function exponent) OR a multiplication (as
# implicit multiplication would have added an extraneous
# multiplication)
if (isinstance(tok, AppliedFunction)
or (tok[0] == OP and tok[1] == '*')):
# don't add anything if the next token is a multiplication
# or if there's already a parenthesis (if parenthesis, still
# stop skipping tokens)
if not (nextTok[0] == OP and nextTok[1] == '*'):
if not(nextTok[0] == OP and nextTok[1] == '('):
result.append((OP, '('))
appendParen += 1
exponentSkip = False
elif appendParen:
if nextTok[0] == OP and nextTok[1] in ('^', '**', '*'):
skip = 1
continue
if skip:
skip -= 1
continue
result.append((OP, ')'))
appendParen -= 1
if tokens:
result.append(tokens[-1])
if appendParen:
result.extend([(OP, ')')] * appendParen)
return result
def function_exponentiation(tokens, local_dict, global_dict):
"""Allows functions to be exponentiated, e.g. ``cos**2(x)``.
Examples
========
>>> from sympy.parsing.sympy_parser import (parse_expr,
... standard_transformations, function_exponentiation)
>>> transformations = standard_transformations + (function_exponentiation,)
>>> parse_expr('sin**4(x)', transformations=transformations)
sin(x)**4
"""
result = []
exponent = []
consuming_exponent = False
level = 0
for tok, nextTok in zip(tokens, tokens[1:]):
if tok[0] == NAME and nextTok[0] == OP and nextTok[1] == '**':
if _token_callable(tok, local_dict, global_dict):
consuming_exponent = True
elif consuming_exponent:
exponent.append(tok)
# only want to stop after hitting )
if tok[0] == nextTok[0] == OP and tok[1] == ')' and nextTok[1] == '(':
consuming_exponent = False
# if implicit multiplication was used, we may have )*( instead
if tok[0] == nextTok[0] == OP and tok[1] == '*' and nextTok[1] == '(':
consuming_exponent = False
del exponent[-1]
continue
elif exponent and not consuming_exponent:
if tok[0] == OP:
if tok[1] == '(':
level += 1
elif tok[1] == ')':
level -= 1
if level == 0:
result.append(tok)
result.extend(exponent)
exponent = []
continue
result.append(tok)
if tokens:
result.append(tokens[-1])
if exponent:
result.extend(exponent)
return result
def split_symbols_custom(predicate):
"""Creates a transformation that splits symbol names.
``predicate`` should return True if the symbol name is to be split.
For instance, to retain the default behavior but avoid splitting certain
symbol names, a predicate like this would work:
>>> from sympy.parsing.sympy_parser import (parse_expr, _token_splittable,
... standard_transformations, implicit_multiplication,
... split_symbols_custom)
>>> def can_split(symbol):
... if symbol not in ('list', 'of', 'unsplittable', 'names'):
... return _token_splittable(symbol)
... return False
...
>>> transformation = split_symbols_custom(can_split)
>>> parse_expr('unsplittable', transformations=standard_transformations +
... (transformation, implicit_multiplication))
unsplittable
"""
def _split_symbols(tokens, local_dict, global_dict):
result = []
split = False
split_previous=False
for tok in tokens:
if split_previous:
# throw out closing parenthesis of Symbol that was split
split_previous=False
continue
split_previous=False
if tok[0] == NAME and tok[1] == 'Symbol':
split = True
elif split and tok[0] == NAME:
symbol = tok[1][1:-1]
if predicate(symbol):
for char in symbol:
if char in local_dict or char in global_dict:
# Get rid of the call to Symbol
del result[-2:]
result.extend([(NAME, "%s" % char),
(NAME, 'Symbol'), (OP, '(')])
else:
result.extend([(NAME, "'%s'" % char), (OP, ')'),
(NAME, 'Symbol'), (OP, '(')])
# Delete the last two tokens: get rid of the extraneous
# Symbol( we just added
# Also, set split_previous=True so will skip
# the closing parenthesis of the original Symbol
del result[-2:]
split = False
split_previous = True
continue
else:
split = False
result.append(tok)
return result
return _split_symbols
#: Splits symbol names for implicit multiplication.
#:
#: Intended to let expressions like ``xyz`` be parsed as ``x*y*z``. Does not
#: split Greek character names, so ``theta`` will *not* become
#: ``t*h*e*t*a``. Generally this should be used with
#: ``implicit_multiplication``.
split_symbols = split_symbols_custom(_token_splittable)
def implicit_multiplication(result, local_dict, global_dict):
"""Makes the multiplication operator optional in most cases.
Use this before :func:`implicit_application`, otherwise expressions like
``sin 2x`` will be parsed as ``x * sin(2)`` rather than ``sin(2*x)``.
Examples
========
>>> from sympy.parsing.sympy_parser import (parse_expr,
... standard_transformations, implicit_multiplication)
>>> transformations = standard_transformations + (implicit_multiplication,)
>>> parse_expr('3 x y', transformations=transformations)
3*x*y
"""
# These are interdependent steps, so we don't expose them separately
for step in (_group_parentheses(implicit_multiplication),
_apply_functions,
_implicit_multiplication):
result = step(result, local_dict, global_dict)
result = _flatten(result)
return result
def implicit_application(result, local_dict, global_dict):
"""Makes parentheses optional in some cases for function calls.
Use this after :func:`implicit_multiplication`, otherwise expressions
like ``sin 2x`` will be parsed as ``x * sin(2)`` rather than
``sin(2*x)``.
Examples
========
>>> from sympy.parsing.sympy_parser import (parse_expr,
... standard_transformations, implicit_application)
>>> transformations = standard_transformations + (implicit_application,)
>>> parse_expr('cot z + csc z', transformations=transformations)
cot(z) + csc(z)
"""
for step in (_group_parentheses(implicit_application),
_apply_functions,
_implicit_application,):
result = step(result, local_dict, global_dict)
result = _flatten(result)
return result
def implicit_multiplication_application(result, local_dict, global_dict):
"""Allows a slightly relaxed syntax.
- Parentheses for single-argument method calls are optional.
- Multiplication is implicit.
- Symbol names can be split (i.e. spaces are not needed between
symbols).
- Functions can be exponentiated.
Examples
========
>>> from sympy.parsing.sympy_parser import (parse_expr,
... standard_transformations, implicit_multiplication_application)
>>> parse_expr("10sin**2 x**2 + 3xyz + tan theta",
... transformations=(standard_transformations +
... (implicit_multiplication_application,)))
3*x*y*z + 10*sin(x**2)**2 + tan(theta)
"""
for step in (split_symbols, implicit_multiplication,
implicit_application, function_exponentiation):
result = step(result, local_dict, global_dict)
return result
def auto_symbol(tokens, local_dict, global_dict):
"""Inserts calls to ``Symbol`` for undefined variables."""
result = []
prevTok = (None, None)
tokens.append((None, None)) # so zip traverses all tokens
for tok, nextTok in zip(tokens, tokens[1:]):
tokNum, tokVal = tok
nextTokNum, nextTokVal = nextTok
if tokNum == NAME:
name = tokVal
if (name in ['True', 'False', 'None']
or iskeyword(name)
or name in local_dict
# Don't convert attribute access
or (prevTok[0] == OP and prevTok[1] == '.')
# Don't convert keyword arguments
or (prevTok[0] == OP and prevTok[1] in ('(', ',')
and nextTokNum == OP and nextTokVal == '=')):
result.append((NAME, name))
continue
elif name in global_dict:
obj = global_dict[name]
if isinstance(obj, (Basic, type)) or callable(obj):
result.append((NAME, name))
continue
result.extend([
(NAME, 'Symbol'),
(OP, '('),
(NAME, repr(str(name))),
(OP, ')'),
])
else:
result.append((tokNum, tokVal))
prevTok = (tokNum, tokVal)
return result
def lambda_notation(tokens, local_dict, global_dict):
"""Substitutes "lambda" with its Sympy equivalent Lambda().
However, the conversion doesn't take place if only "lambda"
is passed because that is a syntax error.
"""
result = []
flag = False
toknum, tokval = tokens[0]
tokLen = len(tokens)
if toknum == NAME and tokval == 'lambda':
if tokLen == 2:
result.extend(tokens)
elif tokLen > 2:
result.extend([
(NAME, 'Lambda'),
(OP, '('),
(OP, '('),
(OP, ')'),
(OP, ')'),
])
for tokNum, tokVal in tokens[1:]:
if tokNum == OP and tokVal == ':':
tokVal = ','
flag = True
if flag:
result.insert(-1, (tokNum, tokVal))
else:
result.insert(-2, (tokNum, tokVal))
else:
result.extend(tokens)
return result
def factorial_notation(tokens, local_dict, global_dict):
"""Allows standard notation for factorial."""
result = []
prevtoken = ''
for toknum, tokval in tokens:
if toknum == OP:
op = tokval
if op == '!!':
if prevtoken == '!' or prevtoken == '!!':
raise TokenError
result = _add_factorial_tokens('factorial2', result)
elif op == '!':
if prevtoken == '!' or prevtoken == '!!':
raise TokenError
result = _add_factorial_tokens('factorial', result)
else:
result.append((OP, op))
else:
result.append((toknum, tokval))
prevtoken = tokval
return result
def convert_xor(tokens, local_dict, global_dict):
"""Treats XOR, ``^``, as exponentiation, ``**``."""
result = []
for toknum, tokval in tokens:
if toknum == OP:
if tokval == '^':
result.append((OP, '**'))
else:
result.append((toknum, tokval))
else:
result.append((toknum, tokval))
return result
def auto_number(tokens, local_dict, global_dict):
"""Converts numeric literals to use SymPy equivalents.
Complex numbers use ``I``; integer literals use ``Integer``, float
literals use ``Float``, and repeating decimals use ``Rational``.
"""
result = []
prevtoken = ''
for toknum, tokval in tokens:
if toknum == NUMBER:
number = tokval
postfix = []
if number.endswith('j') or number.endswith('J'):
number = number[:-1]
postfix = [(OP, '*'), (NAME, 'I')]
if '.' in number or (('e' in number or 'E' in number) and
not (number.startswith('0x') or number.startswith('0X'))):
match = _re_repeated.match(number)
if match is not None:
# Clear repeating decimals, e.g. 3.4[31] -> (3 + 4/10 + 31/990)
pre, post, repetend = match.groups()
zeros = '0'*len(post)
post, repetends = [w.lstrip('0') for w in [post, repetend]]
# or else interpreted as octal
a = pre or '0'
b, c = post or '0', '1' + zeros
d, e = repetends, ('9'*len(repetend)) + zeros
seq = [
(OP, '('),
(NAME,
'Integer'), (OP, '('), (NUMBER, a), (OP, ')'),
(OP, '+'),
(NAME, 'Rational'), (OP, '('), (
NUMBER, b), (OP, ','), (NUMBER, c), (OP, ')'),
(OP, '+'),
(NAME, 'Rational'), (OP, '('), (
NUMBER, d), (OP, ','), (NUMBER, e), (OP, ')'),
(OP, ')'),
]
else:
seq = [(NAME, 'Float'), (OP, '('),
(NUMBER, repr(str(number))), (OP, ')')]
else:
seq = [(NAME, 'Integer'), (OP, '('), (
NUMBER, number), (OP, ')')]
result.extend(seq + postfix)
else:
result.append((toknum, tokval))
return result
def rationalize(tokens, local_dict, global_dict):
"""Converts floats into ``Rational``. Run AFTER ``auto_number``."""
result = []
passed_float = False
for toknum, tokval in tokens:
if toknum == NAME:
if tokval == 'Float':
passed_float = True
tokval = 'Rational'
result.append((toknum, tokval))
elif passed_float == True and toknum == NUMBER:
passed_float = False
result.append((STRING, tokval))
else:
result.append((toknum, tokval))
return result
#: Standard transformations for :func:`parse_expr`.
#: Inserts calls to :class:`Symbol`, :class:`Integer`, and other SymPy
#: datatypes and allows the use of standard factorial notation (e.g. ``x!``).
standard_transformations = (lambda_notation, auto_symbol, auto_number, factorial_notation)
def stringify_expr(s, local_dict, global_dict, transformations):
"""
Converts the string ``s`` to Python code, in ``local_dict``
Generally, ``parse_expr`` should be used.
"""
tokens = []
input_code = StringIO(s.strip())
for toknum, tokval, _, _, _ in generate_tokens(input_code.readline):
tokens.append((toknum, tokval))
for transform in transformations:
tokens = transform(tokens, local_dict, global_dict)
return untokenize(tokens)
def eval_expr(code, local_dict, global_dict):
"""
Evaluate Python code generated by ``stringify_expr``.
Generally, ``parse_expr`` should be used.
"""
expr = eval(
code, global_dict, local_dict) # take local objects in preference
return expr
def parse_expr(s, local_dict=None, transformations=standard_transformations,
global_dict=None, evaluate=True):
"""Converts the string ``s`` to a SymPy expression, in ``local_dict``
Parameters
==========
s : str
The string to parse.
local_dict : dict, optional
A dictionary of local variables to use when parsing.
global_dict : dict, optional
A dictionary of global variables. By default, this is initialized
with ``from sympy import *``; provide this parameter to override
this behavior (for instance, to parse ``"Q & S"``).
transformations : tuple, optional
A tuple of transformation functions used to modify the tokens of the
parsed expression before evaluation. The default transformations
convert numeric literals into their SymPy equivalents, convert
undefined variables into SymPy symbols, and allow the use of standard
mathematical factorial notation (e.g. ``x!``).
evaluate : bool, optional
When False, the order of the arguments will remain as they were in the
string and automatic simplification that would normally occur is
suppressed. (see examples)
Examples
========
>>> from sympy.parsing.sympy_parser import parse_expr
>>> parse_expr("1/2")
1/2
>>> type(_)
<class 'sympy.core.numbers.Half'>
>>> from sympy.parsing.sympy_parser import standard_transformations,\\
... implicit_multiplication_application
>>> transformations = (standard_transformations +
... (implicit_multiplication_application,))
>>> parse_expr("2x", transformations=transformations)
2*x
When evaluate=False, some automatic simplifications will not occur:
>>> parse_expr("2**3"), parse_expr("2**3", evaluate=False)
(8, 2**3)
In addition the order of the arguments will not be made canonical.
This feature allows one to tell exactly how the expression was entered:
>>> a = parse_expr('1 + x', evaluate=False)
>>> b = parse_expr('x + 1', evaluate=0)
>>> a == b
False
>>> a.args
(1, x)
>>> b.args
(x, 1)
See Also
========
stringify_expr, eval_expr, standard_transformations,
implicit_multiplication_application
"""
if local_dict is None:
local_dict = {}
if global_dict is None:
global_dict = {}
exec_('from sympy import *', global_dict)
code = stringify_expr(s, local_dict, global_dict, transformations)
if not evaluate:
code = compile(evaluateFalse(code), '<string>', 'eval')
return eval_expr(code, local_dict, global_dict)
def evaluateFalse(s):
"""
Replaces operators with the SymPy equivalent and sets evaluate=False.
"""
node = ast.parse(s)
node = EvaluateFalseTransformer().visit(node)
# node is a Module, we want an Expression
node = ast.Expression(node.body[0].value)
return ast.fix_missing_locations(node)
class EvaluateFalseTransformer(ast.NodeTransformer):
operators = {
ast.Add: 'Add',
ast.Mult: 'Mul',
ast.Pow: 'Pow',
ast.Sub: 'Add',
ast.Div: 'Mul',
ast.BitOr: 'Or',
ast.BitAnd: 'And',
ast.BitXor: 'Not',
}
def flatten(self, args, func):
result = []
for arg in args:
if isinstance(arg, ast.Call) and arg.func.id == func:
result.extend(self.flatten(arg.args, func))
else:
result.append(arg)
return result
def visit_BinOp(self, node):
if node.op.__class__ in self.operators:
sympy_class = self.operators[node.op.__class__]
right = self.visit(node.right)
if isinstance(node.op, ast.Sub):
right = ast.UnaryOp(op=ast.USub(), operand=right)
elif isinstance(node.op, ast.Div):
right = ast.Call(
func=ast.Name(id='Pow', ctx=ast.Load()),
args=[right, ast.UnaryOp(op=ast.USub(), operand=ast.Num(1))],
keywords=[ast.keyword(arg='evaluate', value=ast.Name(id='False', ctx=ast.Load()))],
starargs=None,
kwargs=None
)
new_node = ast.Call(
func=ast.Name(id=sympy_class, ctx=ast.Load()),
args=[self.visit(node.left), right],
keywords=[ast.keyword(arg='evaluate', value=ast.Name(id='False', ctx=ast.Load()))],
starargs=None,
kwargs=None
)
if sympy_class in ('Add', 'Mul'):
# Denest Add or Mul as appropriate
new_node.args = self.flatten(new_node.args, sympy_class)
return new_node
return node
| bsd-3-clause | 2,356,793,139,194,653,700 | 32.679012 | 103 | 0.540956 | false |
Tetchain/pycoin | tests/encoding_test.py | 18 | 9641 | #!/usr/bin/env python
import unittest
from pycoin import encoding
from pycoin.serialize import h2b
class EncodingTestCase(unittest.TestCase):
def test_to_from_long(self):
def do_test(as_int, prefix, as_rep, base):
self.assertEqual((as_int, prefix), encoding.to_long(base, encoding.byte_to_int, as_rep))
self.assertEqual(as_rep, encoding.from_long(as_int, prefix, base, lambda v:v))
do_test(10000101, 2, h2b("00009896e5"), 256)
do_test(10000101, 3, h2b("0000009896e5"), 256)
do_test(1460765565493402645157733592332121663123460211377, 1, b'\0\xff\xde\xfeOHu\xcf\x11\x9f\xc3\xd8\xf4\xa0\x9a\xe3~\xc4\xccB\xb1', 256)
def test_to_bytes_32(self):
for i in range(256):
v = encoding.to_bytes_32(i)
self.assertEqual(v, b'\0' * 31 + bytes(bytearray([i])))
for i in range(256,512):
v = encoding.to_bytes_32(i)
self.assertEqual(v, b'\0' * 30 + bytes(bytearray([1, i&0xff])))
def test_to_from_base58(self):
def do_test(as_text, as_bin):
self.assertEqual(as_bin, encoding.a2b_base58(as_text))
self.assertEqual(as_text, encoding.b2a_base58(as_bin))
do_test("1abcdefghijkmnpqrst", b'\x00\x01\x93\\|\xf2*\xb9\xbe\x19b\xae\xe4\x8c{')
do_test("1CASrvcpMMTa4dz4DmYtAqcegCtdkhjvdn", b'\x00zr\xb6\xfac\xde6\xc4\xab\xc6\nh\xb5-\x7f3\xe3\xd7\xcd>\xc4\xba\xbd9')
do_test("1111111111111111aaaa11aa",
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00CnzQ)\x0b')
do_test("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz",
b'\x00\x01\x11\xd3\x8e_\xc9\x07\x1f\xfc\xd2\x0bJv<\xc9\xaeO%+\xb4\xe4\x8f\xd6j\x83^%*\xda\x93\xffH\rm\xd4=\xc6*d\x11U\xa5')
def test_to_from_hashed_base58(self):
def do_test(as_text, as_bin):
self.assertEqual(as_text, encoding.b2a_hashed_base58(as_bin))
self.assertEqual(as_bin, encoding.a2b_hashed_base58(as_text))
self.assertTrue(encoding.is_hashed_base58_valid(as_text))
bogus_text = as_text[:-1] + chr(1+ord(as_text[-1]))
self.assertFalse(encoding.is_hashed_base58_valid(bogus_text))
do_test("14nr3dMd4VwNpFhFECU1A6imi", b'\x00\x01\x93\\|\xf2*\xb9\xbe\x19b\xae\xe4\x8c{')
do_test("1CASrvcpMMTa4dz4DmYtAqcegCtdkhjvdn", b'\x00zr\xb6\xfac\xde6\xc4\xab\xc6\nh\xb5-\x7f3\xe3\xd7\xcd>')
do_test("11111111111111114njGbaozZJui9o",
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00CnzQ)\x0b')
do_test("1mLRia5CbfDB9752zxvtrpnkigecaYWUSQNLJGECA8641ywusqomjhfdb6EM7bXGj1Gb",
b'\x00\x01\x11\xd3\x8e_\xc9\x07\x1f\xfc\xd2\x0bJv<\xc9\xaeO%+\xb4\xe4\x8f\xd6j\x83^%*\xda\x93\xffH\rm\xd4=\xc6*d\x11U\xa5aaaa')
def test_double_sha256(self):
def do_test(blob, expected_hash):
self.assertEqual(encoding.double_sha256(blob), expected_hash)
do_test(b"This is a test",
b'\xea\xc6I\xd41\xaa?\xc2\xd5t\x9d\x1aP!\xbb\xa7\x81.\xc8;\x8aY\xfa\x84\x0b\xffu\xc1\x7f\x8af\\')
do_test(b"The quick brown fox jumps over the lazy dogs",
b'\x8a5e\x88yz\x90\x1a\x11\x03\x17y\xd4xz\xd0E~\xb0\x82\xc5k\xd9\xb6W\x15z\xcf1\xba\xe6\xc4')
do_test(b'\x74' * 10000,
b'nMw6\xaa7<G\x18\xee\xf2\xb9E(\xfe\xd5u\x19\xa0\xbd\xc3\xa8\xf40\n\xee7,\xbe\xde\xa9\xa0')
def test_hash160(self):
def do_test(blob, expected_hash):
self.assertEqual(encoding.hash160(blob), expected_hash)
do_test(b"This is a test",
b'\x18\xac\x98\xfa*$\x12\xdd\xb7]\xe6\x04Y\xb5*\xcd\x98\xf2\xd9r')
do_test(b"The quick brown fox jumps over the lazy dogs",
b'v\xc9\xd1\xf3\xaaR&UN G_\x91\x9a\xad\xd1t\xf7\xe9\xb7')
do_test(b'\x74' * 10000,
b'\xa9a\x07\x02\x96gt\x01\xa5~\xae\r\x96\xd1MZ\x88\n,A')
def test_wif_to_from_secret_exponent(self):
def do_test(as_secret_exponent, as_wif, is_compressed):
self.assertEqual(as_wif, encoding.secret_exponent_to_wif(as_secret_exponent, compressed=is_compressed))
se, comp = encoding.wif_to_tuple_of_secret_exponent_compressed(as_wif)
self.assertEqual(se, as_secret_exponent)
self.assertEqual(comp, is_compressed)
self.assertTrue(encoding.is_valid_wif(as_wif))
WIF_LIST = [
"5HwoXVkHoRM8sL2KmNRS217n1g8mPPBomrY7yehCuXC1115WWsh",
"5J5KUK3VXP8HUefNVYPxwxVRokScZdWXpu1Tj8LfaAXMqHzMmbk",
"5JCqR8LhFLuS5yJRDiNVsus5bpkTjsqFswUoUbz8EorifYA4TwJ",
"5JLMMwdtyJgahHwTwtM2osEjPu4Jv89yvyx9E5dauTC5Vs6EjBA",
"5JTsJkw6hGTjJcaWg4KZjpcPByNA6NUhz2RUyZH3a6XSL7vAYmy",
"5JbPFaEJREEsuwDZQEJ6fmz2z3g1GcoS34tpj2vWEjroARtCMBF",
"5JiuCPXW9C22XFrc8QGdbjMgn7yrSs8A67NAUWZxuPC9ziUizQP",
"5JrR9Cphs9oB8aVeraFAXgjLaCHhd7St99qWDzDRa2XWq3RVw7d",
"5Jyw627ub7aKju8hakDhTe6zNGbYoMmcCCJqyTrtEfrsfLDreVt",
"5K7T2qR7K5MUMDmkJvCEPbUeALuPyc6LFEnBiwWLuKCEVdBp8qV",
"5KExyeiK338cxYQo36AmKYrHxRDF9rR4JHFXUR9oZxXbKue7gdL",
"5KNUvU1WkzumZs3qmG9JFWDwkVX6L6jnMKisDtoGEbrxACzxk6T",
"5KVzsHJiUxgvBBgtVS7qBTbbYZpwWM4WQNCCyNSiuFCJzYMxg8H",
"5KdWp6bvCvU4nWKwDc6N7QyFLe8ngbPETQfYir6BZtXfpsnSrGS",
]
SE_LIST = [int(c * 64, 16) for c in "123456789abcde"]
for se, wif in zip(SE_LIST, WIF_LIST):
do_test(se, wif, is_compressed=False)
def test_public_pair_to_sec(self):
def do_test(as_public_pair, as_sec, is_compressed, as_hash160_sec, as_bitcoin_address):
self.assertEqual(encoding.sec_to_public_pair(as_sec), as_public_pair)
self.assertEqual(encoding.public_pair_to_sec(as_public_pair, compressed=is_compressed), as_sec)
self.assertEqual(encoding.is_sec_compressed(as_sec), is_compressed)
self.assertEqual(encoding.public_pair_to_hash160_sec(as_public_pair, compressed=is_compressed),
as_hash160_sec)
self.assertEqual(encoding.hash160_sec_to_bitcoin_address(as_hash160_sec), as_bitcoin_address)
self.assertEqual(encoding.public_pair_to_bitcoin_address(as_public_pair, compressed=is_compressed), as_bitcoin_address)
self.assertTrue(encoding.is_valid_bitcoin_address(as_bitcoin_address))
bad_address = as_bitcoin_address[:17] + chr(ord(as_bitcoin_address[17]) + 1) + as_bitcoin_address[18:]
self.assertFalse(encoding.is_valid_bitcoin_address(bad_address))
SEC_TEST_DATA = [
((35826991941973211494003564265461426073026284918572421206325859877044495085994,
25491041833361137486709012056693088297620945779048998614056404517283089805761),
"034f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa",
True,
"fc7250a211deddc70ee5a2738de5f07817351cef",
"1Q1pE5vPGEEMqRcVRMbtBK842Y6Pzo6nK9"
),
((31855367722742370537280679280108010854876607759940877706949385967087672770343,
46659058944867745027460438812818578793297503278458148978085384795486842595210),
"02466d7fcae563e5cb09a0d1870bb580344804617879a14949cf22285f1bae3f27",
True,
"531260aa2a199e228c537dfa42c82bea2c7c1f4d",
"18aF6pYXKDSXjXHpidt2G6okdVdBr8zA7z"
),
((27341391395138457474971175971081207666803680341783085051101294443585438462385,
26772005640425216814694594224987412261034377630410179754457174380653265224672),
"023c72addb4fdf09af94f0c94d7fe92a386a7e70cf8a1d85916386bb2535c7b1b1",
True,
"3bc28d6d92d9073fb5e3adf481795eaf446bceed",
"16Syw4SugWs4siKbK8cuxJXM2ukh2GKpRi"
),
((35826991941973211494003564265461426073026284918572421206325859877044495085994,
25491041833361137486709012056693088297620945779048998614056404517283089805761),
"044f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa"\
"385b6b1b8ead809ca67454d9683fcf2ba03456d6fe2c4abe2b07f0fbdbb2f1c1",
False,
"e4e517ee07984a4000cd7b00cbcb545911c541c4",
"1MsHWS1BnwMc3tLE8G35UXsS58fKipzB7a"
),
((31855367722742370537280679280108010854876607759940877706949385967087672770343,
46659058944867745027460438812818578793297503278458148978085384795486842595210),
"04466d7fcae563e5cb09a0d1870bb580344804617879a14949cf22285f1bae3f27"\
"6728176c3c6431f8eeda4538dc37c865e2784f3a9e77d044f33e407797e1278a",
False,
"b256082b934fe782adbacaafeadfca64c52a5384",
"1HFxLkPTtMZeo5mDpZn6CF9sh4h2ycknwr"
),
((27341391395138457474971175971081207666803680341783085051101294443585438462385,
26772005640425216814694594224987412261034377630410179754457174380653265224672),
"043c72addb4fdf09af94f0c94d7fe92a386a7e70cf8a1d85916386bb2535c7b1b1"\
"3b306b0fe085665d8fc1b28ae1676cd3ad6e08eaeda225fe38d0da4de55703e0",
False,
"edf6bbd7ba7aad222c2b28e6d8d5001178e3680c",
"1NhEipumt9Pug6pwTqMNRXhBG84K39Ebbi"
),
]
for public_pair, sec, compressed, hash160_sec, bitcoin_address in SEC_TEST_DATA:
do_test(public_pair, h2b(sec), compressed, h2b(hash160_sec), bitcoin_address)
if __name__ == '__main__':
unittest.main()
| mit | 3,333,543,564,550,442,500 | 55.711765 | 146 | 0.672648 | false |
google/glazier | testing/run_tests.py | 1 | 1660 | # Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Locate *_test modules and run the tests in them."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pkgutil
import re
import subprocess
import sys
import glazier
FAILED_RE = re.compile(r'FAILED\s*\(errors=(\d*)\)')
def main():
results = {'codes': {0: 0, 1: 0}, 'errors': 0}
for _, test, _ in pkgutil.walk_packages(glazier.__path__,
glazier.__name__ + '.'):
if '_test' in test:
print('**** %s ****\n' % test)
proc = subprocess.Popen(['python', '-m', test], stderr=subprocess.PIPE)
_, err = proc.communicate()
err = err.decode()
print(err)
failed = FAILED_RE.search(err)
if failed:
results['errors'] += int(failed.group(1))
results['codes'][proc.returncode] = results['codes'].setdefault(
proc.returncode, 0) + 1
print('Success: %s' % results['codes'][0])
print('Failure: %s' % results['codes'][1])
sys.exit(results['codes'][1])
if __name__ == '__main__':
main()
| apache-2.0 | -1,978,808,782,491,652,600 | 30.923077 | 77 | 0.642169 | false |
YueLinHo/Subversion | subversion/tests/cmdline/svntest/verify.py | 1 | 34138 | #
# verify.py: routines that handle comparison and display of expected
# vs. actual output
#
# Subversion is a tool for revision control.
# See http://subversion.tigris.org for more information.
#
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
######################################################################
import re, sys
from difflib import unified_diff, ndiff
import pprint
import logging
import svntest
logger = logging.getLogger()
######################################################################
# Exception types
class SVNUnexpectedOutput(svntest.Failure):
"""Exception raised if an invocation of svn results in unexpected
output of any kind."""
pass
class SVNUnexpectedStdout(SVNUnexpectedOutput):
"""Exception raised if an invocation of svn results in unexpected
output on STDOUT."""
pass
class SVNUnexpectedStderr(SVNUnexpectedOutput):
"""Exception raised if an invocation of svn results in unexpected
output on STDERR."""
pass
class SVNExpectedStdout(SVNUnexpectedOutput):
"""Exception raised if an invocation of svn results in no output on
STDOUT when output was expected."""
pass
class SVNExpectedStderr(SVNUnexpectedOutput):
"""Exception raised if an invocation of svn results in no output on
STDERR when output was expected."""
pass
class SVNUnexpectedExitCode(SVNUnexpectedOutput):
"""Exception raised if an invocation of svn exits with a value other
than what was expected."""
pass
class SVNIncorrectDatatype(SVNUnexpectedOutput):
"""Exception raised if invalid input is passed to the
run_and_verify_* API"""
pass
class SVNDumpParseError(svntest.Failure):
"""Exception raised if parsing a dump file fails"""
pass
######################################################################
# Comparison of expected vs. actual output
def createExpectedOutput(expected, output_type, match_all=True):
"""Return EXPECTED, promoted to an ExpectedOutput instance if not
None. Raise SVNIncorrectDatatype if the data type of EXPECTED is
not handled."""
if isinstance(expected, list):
expected = ExpectedOutput(expected)
elif isinstance(expected, str):
expected = RegexOutput(expected, match_all)
elif isinstance(expected, int):
expected = RegexOutput(".*: E%d:.*" % expected, False)
elif expected is AnyOutput:
expected = AnyOutput()
elif expected is not None and not isinstance(expected, ExpectedOutput):
raise SVNIncorrectDatatype("Unexpected type for '%s' data" % output_type)
return expected
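# Illustrative sketch (not part of the test library): how the promotion
# rules above play out.  The inputs are made-up examples; 160013 is just
# an example APR-style error code.
#
#   >>> type(createExpectedOutput(['A  iota\n'], 'stdout')).__name__
#   'ExpectedOutput'
#   >>> type(createExpectedOutput('.*warning.*', 'stderr')).__name__
#   'RegexOutput'
#   >>> type(createExpectedOutput(160013, 'stderr')).__name__
#   'RegexOutput'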
class ExpectedOutput(object):
"""Matches an ordered list of lines.
If MATCH_ALL is True, the expected lines must match all the actual
lines, one-to-one, in the same order. If MATCH_ALL is False, the
expected lines must match a subset of the actual lines, one-to-one,
in the same order, ignoring any other actual lines among the
matching ones.
"""
def __init__(self, expected, match_all=True):
"""Initialize the expected output to EXPECTED which is a string, or
a list of strings.
See also: svntest.verify.createExpectedOutput().
"""
assert expected is not None
self.expected = expected
self.match_all = match_all
def __str__(self):
return str(self.expected)
def __cmp__(self, other):
raise TypeError("ExpectedOutput does not implement direct comparison; "
"see the 'matches()' method")
def matches(self, actual):
"""Return whether SELF matches ACTUAL (which may be a list
of newline-terminated lines, or a single string).
"""
assert actual is not None
expected = self.expected
if not isinstance(expected, list):
expected = [expected]
if not isinstance(actual, list):
actual = [actual]
if self.match_all:
return expected == actual
i_expected = 0
for actual_line in actual:
if expected[i_expected] == actual_line:
i_expected += 1
if i_expected == len(expected):
return True
return False
def display_differences(self, message, label, actual):
"""Show the differences between the expected and ACTUAL lines. Print
MESSAGE unless it is None, the expected lines, the ACTUAL lines,
and a diff, all labeled with LABEL.
"""
display_lines(message, self.expected, actual, label, label)
display_lines_diff(self.expected, actual, label, label)
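# Illustrative sketch (not part of the test library): the effect of
# MATCH_ALL.  The status lines are made-up examples.
#
#   >>> ExpectedOutput(['A  iota\n', 'U  mu\n']).matches(['A  iota\n', 'U  mu\n'])
#   True
#   >>> ExpectedOutput(['U  mu\n']).matches(['A  iota\n', 'U  mu\n'])
#   False
#   >>> ExpectedOutput(['U  mu\n'], match_all=False).matches(
#   ...   ['A  iota\n', 'U  mu\n', 'D  nu\n'])
#   True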
class AnyOutput(ExpectedOutput):
"""Matches any non-empty output.
"""
def __init__(self):
ExpectedOutput.__init__(self, [], False)
def matches(self, actual):
assert actual is not None
if len(actual) == 0:
# No actual output. No match.
return False
for line in actual:
# If any line has some text, then there is output, so we match.
if line:
return True
# We did not find a line with text. No match.
return False
def display_differences(self, message, label, actual):
if message:
logger.warn(message)
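# Illustrative sketch (not part of the test library): AnyOutput accepts
# any non-empty output.
#
#   >>> AnyOutput().matches(['anything at all\n'])
#   True
#   >>> AnyOutput().matches([])
#   False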
class RegexOutput(ExpectedOutput):
"""Matches a single regular expression.
If MATCH_ALL is true, every actual line must match the RE. If
MATCH_ALL is false, at least one actual line must match the RE. In
any case, there must be at least one line of actual output.
"""
def __init__(self, expected, match_all=True):
"EXPECTED is a regular expression string."
assert isinstance(expected, str) or isinstance(expected, bytes)
ExpectedOutput.__init__(self, expected, match_all)
self.expected_re = re.compile(expected)
def matches(self, actual):
assert actual is not None
if not isinstance(actual, list):
actual = [actual]
# If a regex was provided assume that we require some actual output.
# Fail if we don't have any.
if len(actual) == 0:
return False
if self.match_all:
return all(self.expected_re.match(line) for line in actual)
else:
return any(self.expected_re.match(line) for line in actual)
def display_differences(self, message, label, actual):
display_lines(message, self.expected, actual, label + ' (regexp)', label)
  def insert(self, index, line):
    # self.expected is a single regex string here (see __init__); since
    # strings are immutable, splice LINE in at offset INDEX and recompile.
    self.expected = self.expected[:index] + line + self.expected[index:]
    self.expected_re = re.compile(self.expected)
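# Illustrative sketch (not part of the test library): one pattern,
# applied per line.  The lines are made-up examples.
#
#   >>> RegexOutput(r'[AU] ').matches(['A  iota\n', 'U  mu\n'])
#   True
#   >>> RegexOutput(r'[AU] ', match_all=False).matches(['A  iota\n', '?  nu\n'])
#   True
#   >>> RegexOutput(r'[AU] ').matches([])   # some output is always required
#   False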
class RegexListOutput(ExpectedOutput):
"""Matches an ordered list of regular expressions.
If MATCH_ALL is True, the expressions must match all the actual
lines, one-to-one, in the same order. If MATCH_ALL is False, the
expressions must match a subset of the actual lines, one-to-one, in
the same order, ignoring any other actual lines among the matching
ones.
In any case, there must be at least one line of actual output.
"""
def __init__(self, expected, match_all=True):
"EXPECTED is a list of regular expression strings."
assert isinstance(expected, list)
ExpectedOutput.__init__(self, expected, match_all)
self.expected_res = [re.compile(e) for e in expected]
def matches(self, actual):
assert actual is not None
if not isinstance(actual, list):
actual = [actual]
if self.match_all:
return (len(self.expected_res) == len(actual) and
all(e.match(a) for e, a in zip(self.expected_res, actual)))
i_expected = 0
for actual_line in actual:
if self.expected_res[i_expected].match(actual_line):
i_expected += 1
if i_expected == len(self.expected_res):
return True
return False
def display_differences(self, message, label, actual):
display_lines(message, self.expected, actual, label + ' (regexp)', label)
def insert(self, index, line):
self.expected.insert(index, line)
self.expected_res = [re.compile(e) for e in self.expected]
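# Illustrative sketch (not part of the test library): with MATCH_ALL
# False the expressions must be found in order, but unrelated lines may
# be interleaved.  The lines are made-up examples.
#
#   >>> RegexListOutput([r'A ', r'D '], match_all=False).matches(
#   ...   ['A  iota\n', 'U  mu\n', 'D  nu\n'])
#   True
#   >>> RegexListOutput([r'D ', r'A '], match_all=False).matches(
#   ...   ['A  iota\n', 'U  mu\n', 'D  nu\n'])
#   False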
class UnorderedOutput(ExpectedOutput):
"""Matches an unordered list of lines.
The expected lines must match all the actual lines, one-to-one, in
any order.
"""
def __init__(self, expected):
assert isinstance(expected, list)
ExpectedOutput.__init__(self, expected)
def matches(self, actual):
if not isinstance(actual, list):
actual = [actual]
return sorted(self.expected) == sorted(actual)
def display_differences(self, message, label, actual):
display_lines(message, self.expected, actual, label + ' (unordered)', label)
display_lines_diff(self.expected, actual, label + ' (unordered)', label)
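# Illustrative sketch (not part of the test library): order is ignored
# but the lines must pair up one-to-one.  Made-up examples.
#
#   >>> UnorderedOutput(['U  mu\n', 'A  iota\n']).matches(['A  iota\n', 'U  mu\n'])
#   True
#   >>> UnorderedOutput(['A  iota\n']).matches(['A  iota\n', 'A  iota\n'])
#   False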
class UnorderedRegexListOutput(ExpectedOutput):
"""Matches an unordered list of regular expressions.
The expressions must match all the actual lines, one-to-one, in any
order.
Note: This can give a false negative result (no match) when there is
an actual line that matches multiple expressions and a different
actual line that matches some but not all of those same
expressions. The implementation matches each expression in turn to
the first unmatched actual line that it can match, and does not try
all the permutations when there are multiple possible matches.
"""
def __init__(self, expected):
assert isinstance(expected, list)
ExpectedOutput.__init__(self, expected)
def matches(self, actual):
assert actual is not None
if not isinstance(actual, list):
actual = [actual]
if len(self.expected) != len(actual):
return False
for e in self.expected:
expect_re = re.compile(e)
for actual_line in actual:
if expect_re.match(actual_line):
actual.remove(actual_line)
break
else:
# One of the regexes was not found
return False
return True
def display_differences(self, message, label, actual):
display_lines(message, self.expected, actual,
label + ' (regexp) (unordered)', label)
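# Illustrative sketch (not part of the test library) of the caveat
# above: each expression claims the first still-unmatched line, so
# overlapping patterns can yield a false negative.  Note also that
# matches() consumes the ACTUAL list it is given.  Made-up examples.
#
#   >>> UnorderedRegexListOutput([r'U ', r'A ']).matches(['A  iota\n', 'U  mu\n'])
#   True
#   >>> UnorderedRegexListOutput([r'.*', r'A ']).matches(['A  iota\n', 'U  mu\n'])
#   False
#
# In the second call '.*' claims 'A  iota\n', leaving nothing that
# 'A ' can match, even though a different pairing would have succeeded.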
class AlternateOutput(ExpectedOutput):
"""Matches any one of a list of ExpectedOutput instances.
"""
def __init__(self, expected, match_all=True):
"EXPECTED is a list of ExpectedOutput instances."
assert isinstance(expected, list) and expected != []
assert all(isinstance(e, ExpectedOutput) for e in expected)
ExpectedOutput.__init__(self, expected)
def matches(self, actual):
assert actual is not None
for e in self.expected:
if e.matches(actual):
return True
return False
def display_differences(self, message, label, actual):
# For now, just display differences against the first alternative.
e = self.expected[0]
e.display_differences(message, label, actual)
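# Illustrative sketch (not part of the test library): accept either of
# two outputs, e.g. when they differ across server versions.  The
# messages are made-up examples.
#
#   >>> alt = AlternateOutput([ExpectedOutput(['old message\n']),
#   ...                        RegexOutput(r'new .* message')])
#   >>> alt.matches(['new style message\n'])
#   True
#   >>> alt.matches(['something else\n'])
#   False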
######################################################################
# Displaying expected and actual output
def display_trees(message, label, expected, actual):
'Print two trees, expected and actual.'
if message is not None:
logger.warn(message)
if expected is not None:
logger.warn('EXPECTED %s:', label)
svntest.tree.dump_tree(expected)
if actual is not None:
logger.warn('ACTUAL %s:', label)
svntest.tree.dump_tree(actual)
def display_lines_diff(expected, actual, expected_label, actual_label):
"""Print a unified diff between EXPECTED (labeled with EXPECTED_LABEL)
and ACTUAL (labeled with ACTUAL_LABEL).
Each of EXPECTED and ACTUAL is a string or a list of strings.
"""
if not isinstance(expected, list):
expected = [expected]
if not isinstance(actual, list):
actual = [actual]
logger.warn('DIFF ' + expected_label + ':')
for x in unified_diff(expected, actual,
fromfile='EXPECTED ' + expected_label,
tofile='ACTUAL ' + actual_label):
logger.warn('| ' + x.rstrip())
def display_lines(message, expected, actual,
expected_label, actual_label=None):
"""Print MESSAGE, unless it is None, then print EXPECTED (labeled
with EXPECTED_LABEL) followed by ACTUAL (labeled with ACTUAL_LABEL).
Each of EXPECTED and ACTUAL is a string or a list of strings.
"""
if message is not None:
logger.warn(message)
if type(expected) is str:
expected = [expected]
if type(actual) is str:
actual = [actual]
if actual_label is None:
actual_label = expected_label
if expected is not None:
logger.warn('EXPECTED %s:', expected_label)
for x in expected:
logger.warn('| ' + x.rstrip())
if actual is not None:
logger.warn('ACTUAL %s:', actual_label)
for x in actual:
logger.warn('| ' + x.rstrip())
def compare_and_display_lines(message, label, expected, actual,
raisable=None):
"""Compare two sets of output lines, and print them if they differ,
preceded by MESSAGE iff not None. EXPECTED may be an instance of
ExpectedOutput (and if not, it is wrapped as such). ACTUAL may be a
list of newline-terminated lines, or a single string. RAISABLE is an
exception class, an instance of which is thrown if ACTUAL doesn't
match EXPECTED."""
if raisable is None:
raisable = svntest.main.SVNLineUnequal
### It'd be nicer to use createExpectedOutput() here, but its
### semantics don't match all current consumers of this function.
assert expected is not None
assert actual is not None
if not isinstance(expected, ExpectedOutput):
expected = ExpectedOutput(expected)
actual = svntest.main.ensure_list(actual)
if len(actual) > 0:
is_binary = not isinstance(actual[0], str)
actual = svntest.main.filter_dbg(actual, is_binary)
if not expected.matches(actual):
expected.display_differences(message, label, actual)
raise raisable
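# Illustrative sketch (not part of the test library) of typical use;
# ACTUAL_STDOUT stands for lines captured from a previously run command:
#
#   compare_and_display_lines('status output mismatch', 'STDOUT',
#                             UnorderedOutput(['A  iota\n', 'A  mu\n']),
#                             actual_stdout)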
def verify_outputs(message, actual_stdout, actual_stderr,
expected_stdout, expected_stderr, all_stdout=True):
"""Compare and display expected vs. actual stderr and stdout lines:
if they don't match, print the difference (preceded by MESSAGE iff
not None) and raise an exception.
  If EXPECTED_STDERR or EXPECTED_STDOUT is a string, the string is
  interpreted as a regular expression. For EXPECTED_STDOUT and
  ACTUAL_STDOUT to match, every line in ACTUAL_STDOUT must match the
  EXPECTED_STDOUT regex, unless ALL_STDOUT is false. For
  EXPECTED_STDERR regexes, only one line in ACTUAL_STDERR need match."""
expected_stderr = createExpectedOutput(expected_stderr, 'stderr', False)
expected_stdout = createExpectedOutput(expected_stdout, 'stdout', all_stdout)
for (actual, expected, label, raisable) in (
(actual_stderr, expected_stderr, 'STDERR', SVNExpectedStderr),
(actual_stdout, expected_stdout, 'STDOUT', SVNExpectedStdout)):
if expected is None:
continue
if isinstance(expected, RegexOutput):
raisable = svntest.main.SVNUnmatchedError
elif not isinstance(expected, AnyOutput):
raisable = svntest.main.SVNLineUnequal
compare_and_display_lines(message, label, expected, actual, raisable)
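# Illustrative sketch (not part of the test library): require a matching
# line on stdout and an empty stderr.  ACTUAL_STDOUT and ACTUAL_STDERR
# stand for captured command output:
#
#   verify_outputs('commit output mismatch', actual_stdout, actual_stderr,
#                  r'Committed revision \d+', [], all_stdout=False)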
def verify_exit_code(message, actual, expected,
raisable=SVNUnexpectedExitCode):
"""Compare and display expected vs. actual exit codes:
if they don't match, print the difference (preceded by MESSAGE iff
not None) and raise an exception."""
if expected != actual:
display_lines(message, str(expected), str(actual), "Exit Code")
raise raisable
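# Illustrative sketch (not part of the test library); EXIT_CODE stands
# for the code returned by a previously run command:
#
#   verify_exit_code('unexpected exit code', exit_code, 0)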
# A simple dump file parser. While sufficient for the current
# test suite, it doesn't cope with all valid dump files.
class DumpParser:
def __init__(self, lines):
self.current = 0
self.lines = lines
self.parsed = {}
def parse_line(self, regex, required=True):
m = re.match(regex, self.lines[self.current])
if not m:
if required:
raise SVNDumpParseError("expected '%s' at line %d\n%s"
"\nPrevious lines:\n%s"
% (regex, self.current,
self.lines[self.current],
                                 b''.join(self.lines[max(0, self.current - 10):self.current])))
else:
return None
self.current += 1
return m.group(1)
def parse_blank(self, required=True):
if self.lines[self.current] != b'\n': # Works on Windows
if required:
raise SVNDumpParseError("expected blank at line %d\n%s"
% (self.current, self.lines[self.current]))
else:
return False
self.current += 1
return True
  def parse_header(self):
regex = b'([^:]*): (.*)$'
m = re.match(regex, self.lines[self.current])
if not m:
raise SVNDumpParseError("expected a header at line %d, but found:\n%s"
% (self.current, self.lines[self.current]))
self.current += 1
return m.groups()
def parse_headers(self):
headers = []
while self.lines[self.current] != b'\n':
      key, val = self.parse_header()
headers.append((key, val))
return headers
def parse_boolean(self, header, required):
return self.parse_line(header + b': (false|true)$', required)
def parse_format(self):
return self.parse_line(b'SVN-fs-dump-format-version: ([0-9]+)$')
def parse_uuid(self):
return self.parse_line(b'UUID: ([0-9a-z-]+)$')
def parse_revision(self):
return self.parse_line(b'Revision-number: ([0-9]+)$')
def parse_prop_delta(self):
return self.parse_line(b'Prop-delta: (false|true)$', required=False)
def parse_prop_length(self, required=True):
return self.parse_line(b'Prop-content-length: ([0-9]+)$', required)
def parse_content_length(self, required=True):
return self.parse_line(b'Content-length: ([0-9]+)$', required)
def parse_path(self):
path = self.parse_line(b'Node-path: (.*)$', required=False)
return path
def parse_kind(self):
return self.parse_line(b'Node-kind: (.+)$', required=False)
def parse_action(self):
return self.parse_line(b'Node-action: ([0-9a-z-]+)$')
def parse_copyfrom_rev(self):
return self.parse_line(b'Node-copyfrom-rev: ([0-9]+)$', required=False)
def parse_copyfrom_path(self):
path = self.parse_line(b'Node-copyfrom-path: (.+)$', required=False)
    if not path and self.lines[self.current] == b'Node-copyfrom-path: \n':
      self.current += 1
      path = b''
return path
def parse_copy_md5(self):
return self.parse_line(b'Text-copy-source-md5: ([0-9a-z]+)$', required=False)
def parse_copy_sha1(self):
return self.parse_line(b'Text-copy-source-sha1: ([0-9a-z]+)$', required=False)
def parse_text_md5(self):
return self.parse_line(b'Text-content-md5: ([0-9a-z]+)$', required=False)
def parse_text_sha1(self):
return self.parse_line(b'Text-content-sha1: ([0-9a-z]+)$', required=False)
def parse_text_delta(self):
return self.parse_line(b'Text-delta: (false|true)$', required=False)
def parse_text_delta_base_md5(self):
return self.parse_line(b'Text-delta-base-md5: ([0-9a-f]+)$', required=False)
def parse_text_delta_base_sha1(self):
return self.parse_line(b'Text-delta-base-sha1: ([0-9a-f]+)$', required=False)
def parse_text_length(self):
return self.parse_line(b'Text-content-length: ([0-9]+)$', required=False)
def get_props(self):
props = []
while not re.match(b'PROPS-END$', self.lines[self.current]):
props.append(self.lines[self.current])
self.current += 1
self.current += 1
# Split into key/value pairs to do an unordered comparison.
# This parses the serialized hash under the assumption that it is valid.
prophash = {}
curprop = [0]
while curprop[0] < len(props):
def read_key_or_value(curprop):
# klen / vlen
klen = int(props[curprop[0]].split()[1])
curprop[0] += 1
# key / value
key = b''
while len(key) != klen + 1:
key += props[curprop[0]]
curprop[0] += 1
key = key[:-1]
return key
if props[curprop[0]].startswith(b'K'):
key = read_key_or_value(curprop)
value = read_key_or_value(curprop)
elif props[curprop[0]].startswith(b'D'):
key = read_key_or_value(curprop)
value = None
      else:
        raise SVNDumpParseError("expected a 'K' or 'D' property line, "
                                "but found:\n%s" % props[curprop[0]])
prophash[key] = value
return prophash
def get_content(self, length):
content = b''
while len(content) < length:
content += self.lines[self.current]
self.current += 1
if len(content) == length + 1:
content = content[:-1]
elif len(content) != length:
raise SVNDumpParseError("content length expected %d actual %d at line %d"
% (length, len(content), self.current))
return content
def parse_one_node(self):
node = {}
# optional 'kind' and required 'action' must be next
node['kind'] = self.parse_kind()
action = self.parse_action()
# read any remaining headers
headers_list = self.parse_headers()
headers = dict(headers_list)
# Content-length must be last, if present
if b'Content-length' in headers and headers_list[-1][0] != b'Content-length':
raise SVNDumpParseError("'Content-length' header is not last, "
"in header block ending at line %d"
% (self.current,))
# parse the remaining optional headers and store in specific keys in NODE
for key, header, regex in [
('copyfrom_rev', b'Node-copyfrom-rev', b'([0-9]+)$'),
('copyfrom_path', b'Node-copyfrom-path', b'(.*)$'),
('copy_md5', b'Text-copy-source-md5', b'([0-9a-z]+)$'),
('copy_sha1', b'Text-copy-source-sha1',b'([0-9a-z]+)$'),
('prop_length', b'Prop-content-length', b'([0-9]+)$'),
('text_length', b'Text-content-length', b'([0-9]+)$'),
('text_md5', b'Text-content-md5', b'([0-9a-z]+)$'),
('text_sha1', b'Text-content-sha1', b'([0-9a-z]+)$'),
('content_length', b'Content-length', b'([0-9]+)$'),
]:
if not header in headers:
node[key] = None
continue
m = re.match(regex, headers[header])
if not m:
raise SVNDumpParseError("expected '%s' at line %d\n%s"
% (regex, self.current,
self.lines[self.current]))
node[key] = m.group(1)
self.parse_blank()
if node['prop_length']:
node['props'] = self.get_props()
if node['text_length']:
node['content'] = self.get_content(int(node['text_length']))
    # Hard to determine how many blank lines are 'correct' (a delete that
    # is followed by an add that is a replace and a copy has one fewer
    # than expected, but that can't be predicted until seeing the add),
    # so allow an arbitrary number
blanks = 0
while self.current < len(self.lines) and self.parse_blank(required=False):
blanks += 1
node['blanks'] = blanks
return action, node
def parse_all_nodes(self):
nodes = {}
while True:
if self.current >= len(self.lines):
break
path = self.parse_path()
if path is None:
break
if not nodes.get(path):
nodes[path] = {}
action, node = self.parse_one_node()
if nodes[path].get(action):
raise SVNDumpParseError("duplicate action '%s' for node '%s' at line %d"
% (action, path, self.current))
nodes[path][action] = node
return nodes
def parse_one_revision(self):
revision = {}
number = self.parse_revision()
revision['prop_length'] = self.parse_prop_length()
revision['content_length'] = self.parse_content_length()
self.parse_blank()
revision['props'] = self.get_props()
self.parse_blank()
revision['nodes'] = self.parse_all_nodes()
return number, revision
def parse_all_revisions(self):
while self.current < len(self.lines):
number, revision = self.parse_one_revision()
if self.parsed.get(number):
raise SVNDumpParseError("duplicate revision %d at line %d"
% (number, self.current))
self.parsed[number] = revision
def parse(self):
self.parsed['format'] = self.parse_format()
self.parse_blank()
self.parsed['uuid'] = self.parse_uuid()
self.parse_blank()
self.parse_all_revisions()
return self.parsed
def compare_dump_files(message, label, expected, actual,
ignore_uuid=False,
expect_content_length_always=False,
ignore_empty_prop_sections=False,
ignore_number_of_blank_lines=False):
"""Parse two dump files EXPECTED and ACTUAL, both of which are lists
of lines as returned by run_and_verify_dump, and check that the same
revisions, nodes, properties, etc. are present in both dumps.
"""
parsed_expected = DumpParser(expected).parse()
parsed_actual = DumpParser(actual).parse()
if ignore_uuid:
parsed_expected['uuid'] = '<ignored>'
parsed_actual['uuid'] = '<ignored>'
for parsed in [parsed_expected, parsed_actual]:
for rev_name, rev_record in parsed.items():
#print "Found %s" % (rev_name,)
if b'nodes' in rev_record:
#print "Found %s.%s" % (rev_name, 'nodes')
for path_name, path_record in rev_record['nodes'].items():
#print "Found %s.%s.%s" % (rev_name, 'nodes', path_name)
for action_name, action_record in path_record.items():
#print "Found %s.%s.%s.%s" % (rev_name, 'nodes', path_name, action_name)
if expect_content_length_always:
if action_record.get('content_length') is None:
#print 'Adding: %s.%s.%s.%s.%s' % (rev_name, 'nodes', path_name, action_name, 'content_length=0')
action_record['content_length'] = '0'
if ignore_empty_prop_sections:
if action_record.get('prop_length') == '10':
#print 'Removing: %s.%s.%s.%s.%s' % (rev_name, 'nodes', path_name, action_name, 'prop_length')
action_record['prop_length'] = None
del action_record['props']
old_content_length = int(action_record['content_length'])
action_record['content_length'] = str(old_content_length - 10)
if ignore_number_of_blank_lines:
action_record['blanks'] = 0
if parsed_expected != parsed_actual:
print('DIFF of raw dumpfiles (including expected differences)')
print(''.join(ndiff(expected, actual)))
raise svntest.Failure('DIFF of parsed dumpfiles (ignoring expected differences)\n'
+ '\n'.join(ndiff(
pprint.pformat(parsed_expected).splitlines(),
pprint.pformat(parsed_actual).splitlines())))
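# Illustrative usage, not part of the original suite: EXPECTED and ACTUAL
# are lists of dump lines, so a hypothetical call looks like
#   compare_dump_files(None, None, expected_lines, actual_lines,
#                      ignore_uuid=True)
# and raises svntest.Failure with an ndiff of the parsed structures when
# the dumps disagree beyond the tolerated differences.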
##########################################################################################
## diff verifications
def is_absolute_url(target):
return (target.startswith('file://')
or target.startswith('http://')
or target.startswith('https://')
or target.startswith('svn://')
or target.startswith('svn+ssh://'))
def make_diff_header(path, old_tag, new_tag, src_label=None, dst_label=None):
"""Generate the expected diff header for file PATH, with its old and new
versions described in parentheses by OLD_TAG and NEW_TAG. SRC_LABEL and
DST_LABEL are paths or urls that are added to the diff labels if we're
diffing against the repository or diffing two arbitrary paths.
Return the header as an array of newline-terminated strings."""
if src_label:
src_label = src_label.replace('\\', '/')
if not is_absolute_url(src_label):
src_label = '.../' + src_label
src_label = '\t(' + src_label + ')'
else:
src_label = ''
if dst_label:
dst_label = dst_label.replace('\\', '/')
if not is_absolute_url(dst_label):
dst_label = '.../' + dst_label
dst_label = '\t(' + dst_label + ')'
else:
dst_label = ''
path_as_shown = path.replace('\\', '/')
return [
"Index: " + path_as_shown + "\n",
"===================================================================\n",
"--- " + path_as_shown + src_label + "\t(" + old_tag + ")\n",
"+++ " + path_as_shown + dst_label + "\t(" + new_tag + ")\n",
]
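# Illustrative example, not part of the original suite: a hypothetical call
#   make_diff_header('A/mu', 'revision 1', 'working copy')
# returns the four header lines
#   Index: A/mu
#   ===================================================================
#   --- A/mu\t(revision 1)
#   +++ A/mu\t(working copy)
# where \t stands for the literal tab emitted above.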
def make_no_diff_deleted_header(path, old_tag, new_tag):
"""Generate the expected diff header for a deleted file PATH when in
'no-diff-deleted' mode. (In that mode, no further details appear after the
header.) Return the header as an array of newline-terminated strings."""
path_as_shown = path.replace('\\', '/')
return [
"Index: " + path_as_shown + " (deleted)\n",
"===================================================================\n",
]
def make_git_diff_header(target_path, repos_relpath,
old_tag, new_tag, add=False, src_label=None,
dst_label=None, delete=False, text_changes=True,
cp=False, mv=False, copyfrom_path=None,
copyfrom_rev=None):
""" Generate the expected 'git diff' header for file TARGET_PATH.
REPOS_RELPATH is the location of the path relative to the repository root.
The old and new versions ("revision X", or "working copy") must be
specified in OLD_TAG and NEW_TAG.
SRC_LABEL and DST_LABEL are paths or urls that are added to the diff
labels if we're diffing against the repository. ADD, DELETE, CP and MV
denote the operations performed on the file. COPYFROM_PATH is the source
of a copy or move. Return the header as an array of newline-terminated
strings."""
path_as_shown = target_path.replace('\\', '/')
if src_label:
src_label = src_label.replace('\\', '/')
src_label = '\t(.../' + src_label + ')'
else:
src_label = ''
if dst_label:
dst_label = dst_label.replace('\\', '/')
dst_label = '\t(.../' + dst_label + ')'
else:
dst_label = ''
output = [
"Index: " + path_as_shown + "\n",
"===================================================================\n"
]
if add:
output.extend([
"diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n",
"new file mode 100644\n",
])
if text_changes:
output.extend([
"--- /dev/null\t(" + old_tag + ")\n",
"+++ b/" + repos_relpath + dst_label + "\t(" + new_tag + ")\n"
])
elif delete:
output.extend([
"diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n",
"deleted file mode 100644\n",
])
if text_changes:
output.extend([
"--- a/" + repos_relpath + src_label + "\t(" + old_tag + ")\n",
"+++ /dev/null\t(" + new_tag + ")\n"
])
elif cp:
if copyfrom_rev:
copyfrom_rev = '@' + copyfrom_rev
else:
copyfrom_rev = ''
output.extend([
"diff --git a/" + copyfrom_path + " b/" + repos_relpath + "\n",
"copy from " + copyfrom_path + copyfrom_rev + "\n",
"copy to " + repos_relpath + "\n",
])
if text_changes:
output.extend([
"--- a/" + copyfrom_path + src_label + "\t(" + old_tag + ")\n",
"+++ b/" + repos_relpath + "\t(" + new_tag + ")\n"
])
elif mv:
output.extend([
"diff --git a/" + copyfrom_path + " b/" + path_as_shown + "\n",
"rename from " + copyfrom_path + "\n",
"rename to " + repos_relpath + "\n",
])
if text_changes:
output.extend([
"--- a/" + copyfrom_path + src_label + "\t(" + old_tag + ")\n",
"+++ b/" + repos_relpath + "\t(" + new_tag + ")\n"
])
else:
output.extend([
"diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n",
"--- a/" + repos_relpath + src_label + "\t(" + old_tag + ")\n",
"+++ b/" + repos_relpath + dst_label + "\t(" + new_tag + ")\n",
])
return output
def make_diff_prop_header(path):
"""Return a property diff sub-header, as a list of newline-terminated
strings."""
return [
"\n",
"Property changes on: " + path.replace('\\', '/') + "\n",
"___________________________________________________________________\n"
]
def make_diff_prop_val(plus_minus, pval):
"Return diff for prop value PVAL, with leading PLUS_MINUS (+ or -)."
if len(pval) > 0 and pval[-1] != '\n':
return [plus_minus + pval + "\n", "\\ No newline at end of property\n"]
return [plus_minus + pval]
def make_diff_prop_deleted(pname, pval):
"""Return a property diff for deletion of property PNAME, old value PVAL.
PVAL is a single string with no embedded newlines. Return the result
as a list of newline-terminated strings."""
return [
"Deleted: " + pname + "\n",
"## -1 +0,0 ##\n"
] + make_diff_prop_val("-", pval)
def make_diff_prop_added(pname, pval):
"""Return a property diff for addition of property PNAME, new value PVAL.
PVAL is a single string with no embedded newlines. Return the result
as a list of newline-terminated strings."""
return [
"Added: " + pname + "\n",
"## -0,0 +1 ##\n",
] + make_diff_prop_val("+", pval)
def make_diff_prop_modified(pname, pval1, pval2):
"""Return a property diff for modification of property PNAME, old value
PVAL1, new value PVAL2.
PVAL is a single string with no embedded newlines. A newline at the
end is significant: without it, we add an extra line saying '\ No
newline at end of property'.
Return the result as a list of newline-terminated strings.
"""
return [
"Modified: " + pname + "\n",
"## -1 +1 ##\n",
] + make_diff_prop_val("-", pval1) + make_diff_prop_val("+", pval2)
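# Illustrative example, not part of the original suite: the helpers above
# compose into a complete property diff, e.g.
#   make_diff_prop_header('A/mu') + make_diff_prop_added('newprop', 'v\n')
# yields, in order: a blank line, 'Property changes on: A/mu', the
# underscore rule, 'Added: newprop', '## -0,0 +1 ##' and '+v'.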
| apache-2.0 | 8,446,738,257,630,799,000 | 34.671891 | 113 | 0.620247 | false |
BT-astauder/odoo | openerp/tools/cache.py | 100 | 5907 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# decorator makes wrappers that have the same API as their wrapped function;
# this is important for the openerp.api.guess() that relies on signatures
from decorator import decorator
from inspect import getargspec
import lru
import logging
logger = logging.getLogger(__name__)
class ormcache(object):
""" LRU cache decorator for orm methods. """
def __init__(self, skiparg=2, size=8192, multi=None, timeout=None):
self.skiparg = skiparg
self.size = size
self.stat_miss = 0
self.stat_hit = 0
self.stat_err = 0
def __call__(self, method):
self.method = method
lookup = decorator(self.lookup, method)
lookup.clear_cache = self.clear
return lookup
def stat(self):
return "lookup-stats hit=%s miss=%s err=%s ratio=%.1f" % \
(self.stat_hit, self.stat_miss, self.stat_err,
(100*float(self.stat_hit))/(self.stat_miss+self.stat_hit))
def lru(self, model):
ormcache = model._ormcache
try:
d = ormcache[self.method]
except KeyError:
d = ormcache[self.method] = lru.LRU(self.size)
return d
def lookup(self, method, *args, **kwargs):
d = self.lru(args[0])
key = args[self.skiparg:]
try:
r = d[key]
self.stat_hit += 1
return r
except KeyError:
self.stat_miss += 1
value = d[key] = self.method(*args, **kwargs)
return value
except TypeError:
self.stat_err += 1
return self.method(*args, **kwargs)
def clear(self, model, *args):
""" Remove *args entry from the cache or all keys if *args is undefined """
d = self.lru(model)
if args:
logger.warning("ormcache.clear arguments are deprecated and ignored "
"(while clearing caches on (%s).%s)",
model._name, self.method.__name__)
d.clear()
model.pool._any_cache_cleared = True
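# Illustrative usage, not part of the original module; the decorated
# method and skiparg value below are hypothetical:
#
# @ormcache(skiparg=3)
# def _get_something(self, cr, uid, key):
# ...
#
# With skiparg=3 the cache key is built from the arguments after
# (self, cr, uid), so repeated lookups for the same `key` hit the LRU
# stored in model._ormcache, and _get_something.clear_cache(model)
# empties it (clear() is attached as clear_cache in __call__ above).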
class ormcache_context(ormcache):
def __init__(self, skiparg=2, size=8192, accepted_keys=()):
super(ormcache_context, self).__init__(skiparg, size)
self.accepted_keys = accepted_keys
def __call__(self, method):
# remember which argument is context
args = getargspec(method)[0]
self.context_pos = args.index('context')
return super(ormcache_context, self).__call__(method)
def lookup(self, method, *args, **kwargs):
d = self.lru(args[0])
# Note. The decorator() wrapper (used in __call__ above) will resolve
# arguments, and pass them positionally to lookup(). This is why context
# is not passed through kwargs!
if self.context_pos < len(args):
context = args[self.context_pos]
else:
context = kwargs.get('context') or {}
ckey = [(k, context[k]) for k in self.accepted_keys if k in context]
# Beware: do not take the context from args!
key = args[self.skiparg:self.context_pos] + tuple(ckey)
try:
r = d[key]
self.stat_hit += 1
return r
except KeyError:
self.stat_miss += 1
value = d[key] = self.method(*args, **kwargs)
return value
except TypeError:
self.stat_err += 1
return self.method(*args, **kwargs)
class ormcache_multi(ormcache):
def __init__(self, skiparg=2, size=8192, multi=3):
assert skiparg <= multi
super(ormcache_multi, self).__init__(skiparg, size)
self.multi = multi
def lookup(self, method, *args, **kwargs):
d = self.lru(args[0])
base_key = args[self.skiparg:self.multi] + args[self.multi+1:]
ids = args[self.multi]
result = {}
missed = []
# first take what is available in the cache
for i in ids:
key = base_key + (i,)
try:
result[i] = d[key]
self.stat_hit += 1
except Exception:
self.stat_miss += 1
missed.append(i)
if missed:
# call the method for the ids that were not in the cache
args = list(args)
args[self.multi] = missed
result.update(method(*args, **kwargs))
# store those new results back in the cache
for i in missed:
key = base_key + (i,)
d[key] = result[i]
return result
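# Illustrative usage, not part of the original module: with multi=3 the
# argument at position 3 must be a list of ids, e.g. (hypothetical method)
#
# @ormcache_multi(skiparg=2, multi=3)
# def read_flags(self, cr, uid, ids):
# return dict((i, compute(i)) for i in ids)
#
# Ids already cached are served from the LRU and only the missed ids are
# passed through to the wrapped method, whose result must be a dict keyed
# by id.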
class dummy_cache(object):
""" Cache decorator replacement to actually do no caching. """
def __init__(self, *l, **kw):
pass
def __call__(self, fn):
fn.clear_cache = self.clear
return fn
def clear(self, *l, **kw):
pass
# For backward compatibility
cache = ormcache
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 3,479,117,339,764,277,000 | 32 | 83 | 0.562722 | false |
GeoscienceAustralia/Geodesy-Web-Services | aws/amazonia/test/unit_tests/test_stack.py | 2 | 27223 | from amazonia.classes.api_gateway_config import ApiGatewayMethodConfig
from amazonia.classes.api_gateway_config import ApiGatewayResponseConfig, ApiGatewayRequestConfig
from amazonia.classes.asg_config import AsgConfig
from amazonia.classes.block_devices_config import BlockDevicesConfig
from amazonia.classes.cf_distribution_config import CFDistributionConfig, CFOriginsConfig, CFCacheBehaviorConfig
from amazonia.classes.database_config import DatabaseConfig
from amazonia.classes.elb_config import ElbConfig, ElbListenersConfig
from amazonia.classes.lambda_config import LambdaConfig
from amazonia.classes.stack import Stack, DuplicateUnitNameError
from amazonia.classes.util import get_cf_friendly_name
from nose.tools import *
from troposphere import Tags, Ref
userdata = keypair = instance_type = code_deploy_service_role = vpc_cidr = public_cidr = \
minsize = maxsize = elb_health_check = nat_image_id = jump_image_id = unit_image_id = health_check_grace_period = \
health_check_type = db_instance_type = db_engine = db_port = db_hdd_size = owner_emails = \
db_backup_window = db_backup_retention = db_maintenance_window = db_storage_type = block_devices_config = \
elb_listeners_config = healthy_threshold = unhealthy_threshold = interval = timeout = sticky_app_cookies = None
availability_zones = []
home_cidrs = []
instance_port = []
loadbalancer_port = []
instance_protocol = []
loadbalancer_protocol = []
def setup_resources():
global userdata, availability_zones, keypair, instance_type, code_deploy_service_role, vpc_cidr, \
public_cidr, instance_port, loadbalancer_port, instance_protocol, loadbalancer_protocol, minsize, maxsize, \
elb_health_check, home_cidrs, nat_image_id, jump_image_id, health_check_grace_period, health_check_type, \
unit_image_id, db_instance_type, db_engine, db_port, db_hdd_size, owner_emails, \
db_backup_window, db_backup_retention, db_maintenance_window, db_storage_type, block_devices_config, \
elb_listeners_config, healthy_threshold, unhealthy_threshold, interval, timeout
userdata = """#cloud-config
repo_update: true
repo_upgrade: all
packages:
- httpd
runcmd:
- service httpd start
"""
availability_zones = ['ap-southeast-2a', 'ap-southeast-2b', 'ap-southeast-2c']
keypair = 'INSERT_YOUR_KEYPAIR_HERE'
nat_image_id = 'ami-53371f30'
jump_image_id = 'ami-dc361ebf'
unit_image_id = 'ami-dc361ebf'
instance_type = 't2.nano'
code_deploy_service_role = 'arn:aws:iam::1234567890124:role/CodeDeployServiceRole'
vpc_cidr = {'name': 'VPC', 'cidr': '10.0.0.0/16'}
home_cidrs = [{'name': 'GA', 'cidr': '123.123.12.34/32'}, {'name': 'home', 'cidr': '192.168.0.1/16'}]
instance_port = ['80']
loadbalancer_port = ['80']
instance_protocol = ['HTTP']
loadbalancer_protocol = ['HTTP']
minsize = 1
maxsize = 1
elb_health_check = 'HTTP:80/index.html'
healthy_threshold = 10
unhealthy_threshold = 2
interval = 300
timeout = 30
sticky_app_cookie = 'JSESSION'
public_cidr = {'name': 'PublicIp', 'cidr': '0.0.0.0/0'}
health_check_grace_period = 300
health_check_type = 'ELB'
owner_emails = ['[email protected]']
db_instance_type = 'db.m1.small'
db_engine = 'postgres'
db_port = '5432'
db_hdd_size = 5
db_backup_window = '17:00-17:30'
db_backup_retention = '4'
db_maintenance_window = 'Mon:01:00-Mon:01:30'
db_storage_type = 'gp2'
block_devices_config = [BlockDevicesConfig(
device_name='/dev/xvda',
ebs_volume_size='15',
ebs_volume_type='gp2',
ebs_encrypted=False,
ebs_snapshot_id=None,
virtual_name=False), BlockDevicesConfig(
device_name='/dev/sda2',
ebs_volume_size='',
ebs_volume_type='',
ebs_encrypted=False,
ebs_snapshot_id='',
virtual_name=True
)]
elb_listeners_config = [
ElbListenersConfig(
instance_port='80',
loadbalancer_port='80',
loadbalancer_protocol='HTTP',
instance_protocol='HTTP',
sticky_app_cookie=sticky_app_cookie
)]
@with_setup(setup_resources)
def test_stack():
""" Test stack structure
"""
stack = create_stack()
assert_equals(stack.code_deploy_service_role, code_deploy_service_role)
assert_equals(stack.keypair, keypair)
assert_equals(stack.availability_zones, availability_zones)
assert_equals(stack.vpc_cidr, vpc_cidr)
[assert_equals(stack.home_cidrs[num], home_cidrs[num]) for num in range(len(home_cidrs))]
assert_equals(stack.public_cidr, {'name': 'PublicIp', 'cidr': '0.0.0.0/0'})
assert_equals(stack.internet_gateway.title, 'Ig')
assert_is(type(stack.internet_gateway.Tags), Tags)
assert_equals(stack.gateway_attachment.title, 'IgAtch')
assert_is(type(stack.gateway_attachment.VpcId), Ref)
assert_is(type(stack.gateway_attachment.InternetGatewayId), Ref)
assert_equals(stack.public_route_table.title, 'PubRouteTable')
assert_is(type(stack.public_route_table.VpcId), Ref)
assert_is(type(stack.public_route_table.Tags), Tags)
for az in availability_zones:
assert_equals(stack.private_route_tables[az].title, get_cf_friendly_name(az) + 'PriRouteTable')
assert_is(type(stack.private_route_tables[az].VpcId), Ref)
assert_is(type(stack.private_route_tables[az].Tags), Tags)
assert_equals(stack.nat.single.SourceDestCheck, 'false')
assert_equals(stack.jump.single.SourceDestCheck, 'true')
for num in range(len(availability_zones)):
# For public subnets
public_subnet = stack.public_subnets[num]
assert_equals(public_subnet.CidrBlock, ''.join(['10.0.', str(num), '.0/24']))
# For private subnets
private_subnet = stack.private_subnets[num]
assert_equals(private_subnet.CidrBlock, ''.join(['10.0.', str(num + 100), '.0/24']))
assert_equals(len(stack.units), 7)
@with_setup(setup_resources)
def test_highly_available_nat_stack():
""" Test for nat gateway configuration"""
stack = create_stack(nat_highly_available=True)
assert_equals(stack.code_deploy_service_role, code_deploy_service_role)
assert_equals(stack.keypair, keypair)
assert_equals(stack.availability_zones, availability_zones)
assert_equals(stack.vpc_cidr, vpc_cidr)
[assert_equals(stack.home_cidrs[num], home_cidrs[num]) for num in range(len(home_cidrs))]
assert_equals(stack.public_cidr, {'name': 'PublicIp', 'cidr': '0.0.0.0/0'})
assert_equals(stack.internet_gateway.title, 'Ig')
assert_is(type(stack.internet_gateway.Tags), Tags)
assert_equals(stack.gateway_attachment.title, 'IgAtch')
assert_is(type(stack.gateway_attachment.VpcId), Ref)
assert_is(type(stack.gateway_attachment.InternetGatewayId), Ref)
assert_equals(stack.public_route_table.title, 'PubRouteTable')
assert_is(type(stack.public_route_table.VpcId), Ref)
assert_is(type(stack.public_route_table.Tags), Tags)
for az in availability_zones:
assert_equals(stack.private_route_tables[az].title, get_cf_friendly_name(az) + 'PriRouteTable')
assert_is(type(stack.private_route_tables[az].VpcId), Ref)
assert_is(type(stack.private_route_tables[az].Tags), Tags)
assert_equals(len(stack.nat_gateways), len(availability_zones))
assert_equals(stack.jump.single.SourceDestCheck, 'true')
for num in range(len(availability_zones)):
# For public subnets
public_subnet = stack.public_subnets[num]
assert_equals(public_subnet.CidrBlock, ''.join(['10.0.', str(num), '.0/24']))
# For private subnets
private_subnet = stack.private_subnets[num]
assert_equals(private_subnet.CidrBlock, ''.join(['10.0.', str(num + 100), '.0/24']))
assert_equals(len(stack.units), 7)
def test_duplicate_unit_names():
""" Test for duplicate unit names
"""
assert_raises(DuplicateUnitNameError, Stack, **{
'code_deploy_service_role': code_deploy_service_role,
'keypair': keypair,
'availability_zones': availability_zones,
'vpc_cidr': vpc_cidr,
'public_cidr': public_cidr,
'home_cidrs': home_cidrs,
'jump_image_id': jump_image_id,
'jump_instance_type': instance_type,
'nat_image_id': nat_image_id,
'nat_instance_type': instance_type,
'public_hosted_zone_name': None,
'private_hosted_zone_name': 'private.lan.',
'iam_instance_profile_arn': None,
'owner_emails': owner_emails,
'nat_highly_available': False,
'ec2_scheduled_shutdown': False,
'autoscaling_units': [{'unit_title': 'app1',
'asg_config': AsgConfig(
minsize=minsize,
maxsize=maxsize,
image_id=unit_image_id,
instance_type=instance_type,
health_check_grace_period=health_check_grace_period,
health_check_type=health_check_type,
userdata=userdata,
iam_instance_profile_arn=None,
block_devices_config=block_devices_config,
simple_scaling_policy_config=None,
ec2_scheduled_shutdown=None
),
'elb_config': ElbConfig(
elb_listeners_config=elb_listeners_config,
elb_health_check=elb_health_check,
elb_log_bucket=None,
public_unit=True,
ssl_certificate_id=None,
healthy_threshold=healthy_threshold,
unhealthy_threshold=unhealthy_threshold,
interval=interval,
timeout=timeout
),
'dependencies': [],
},
{'unit_title': 'app1',
'elb_config': ElbConfig(
elb_listeners_config=elb_listeners_config,
elb_health_check=elb_health_check,
elb_log_bucket=None,
public_unit=True,
ssl_certificate_id=None,
healthy_threshold=healthy_threshold,
unhealthy_threshold=unhealthy_threshold,
interval=interval,
timeout=timeout
),
'asg_config': AsgConfig(
minsize=minsize,
maxsize=maxsize,
image_id=unit_image_id,
instance_type=instance_type,
health_check_grace_period=health_check_grace_period,
health_check_type=health_check_type,
userdata=userdata,
iam_instance_profile_arn=None,
block_devices_config=None,
simple_scaling_policy_config=None,
ec2_scheduled_shutdown=None
),
'dependencies': [],
}],
'database_units': [],
'zd_autoscaling_units': [],
'cf_distribution_units': [],
'api_gateway_units': [],
'lambda_units': []
})
def create_stack(nat_highly_available=False):
"""
Helper function to create a stack with default values
:return new stack
"""
global userdata, availability_zones, keypair, instance_type, code_deploy_service_role, vpc_cidr, \
public_cidr, instance_port, loadbalancer_port, instance_protocol, loadbalancer_protocol, minsize, maxsize, \
elb_health_check, home_cidrs, nat_image_id, jump_image_id, health_check_grace_period, health_check_type, \
unit_image_id, db_instance_type, db_engine, db_port, owner_emails, db_backup_window, \
db_backup_retention, db_maintenance_window, db_storage_type, block_devices_config, healthy_threshold, \
unhealthy_threshold, interval, timeout, elb_listeners_config, sticky_app_cookies
stack = Stack(
code_deploy_service_role=code_deploy_service_role,
keypair=keypair,
availability_zones=availability_zones,
vpc_cidr=vpc_cidr,
public_cidr=public_cidr,
home_cidrs=home_cidrs,
jump_image_id=jump_image_id,
jump_instance_type=instance_type,
nat_image_id=nat_image_id,
nat_instance_type=instance_type,
public_hosted_zone_name=None,
private_hosted_zone_name='private.lan.',
iam_instance_profile_arn=None,
owner_emails=owner_emails,
nat_highly_available=nat_highly_available,
ec2_scheduled_shutdown=False,
zd_autoscaling_units=[{'unit_title': 'zdapp1',
'elb_config': ElbConfig(
elb_listeners_config=elb_listeners_config,
elb_health_check=elb_health_check,
elb_log_bucket=None,
public_unit=True,
ssl_certificate_id=None,
healthy_threshold=healthy_threshold,
unhealthy_threshold=unhealthy_threshold,
interval=interval,
timeout=timeout
),
'blue_asg_config': AsgConfig(
minsize=minsize,
maxsize=maxsize,
image_id=unit_image_id,
instance_type=instance_type,
health_check_grace_period=health_check_grace_period,
health_check_type=health_check_type,
userdata=userdata,
iam_instance_profile_arn=None,
block_devices_config=block_devices_config,
simple_scaling_policy_config=None,
ec2_scheduled_shutdown=None
),
'green_asg_config': AsgConfig(
minsize=minsize,
maxsize=maxsize,
image_id=unit_image_id,
instance_type=instance_type,
health_check_grace_period=health_check_grace_period,
health_check_type=health_check_type,
userdata=userdata,
iam_instance_profile_arn=None,
block_devices_config=block_devices_config,
simple_scaling_policy_config=None,
ec2_scheduled_shutdown=None
),
'dependencies': ['app2:5432', 'db1:80'],
}],
autoscaling_units=[{'unit_title': 'app1',
'elb_config': ElbConfig(
elb_listeners_config=elb_listeners_config,
elb_health_check=elb_health_check,
elb_log_bucket=None,
public_unit=True,
ssl_certificate_id=None,
healthy_threshold=healthy_threshold,
unhealthy_threshold=unhealthy_threshold,
interval=interval,
timeout=timeout
),
'asg_config': AsgConfig(
minsize=minsize,
maxsize=maxsize,
image_id=unit_image_id,
instance_type=instance_type,
health_check_grace_period=health_check_grace_period,
health_check_type=health_check_type,
userdata=userdata,
iam_instance_profile_arn=None,
block_devices_config=block_devices_config,
simple_scaling_policy_config=None,
ec2_scheduled_shutdown=None
),
'dependencies': ['app2:80', 'db1:5432'],
},
{'unit_title': 'app2',
'elb_config': ElbConfig(
elb_listeners_config=elb_listeners_config,
elb_health_check=elb_health_check,
elb_log_bucket=None,
public_unit=True,
ssl_certificate_id=None,
healthy_threshold=healthy_threshold,
unhealthy_threshold=unhealthy_threshold,
interval=interval,
timeout=timeout
),
'asg_config': AsgConfig(
minsize=minsize,
maxsize=maxsize,
image_id=unit_image_id,
instance_type=instance_type,
health_check_grace_period=health_check_grace_period,
health_check_type=health_check_type,
userdata=userdata,
iam_instance_profile_arn=None,
block_devices_config=block_devices_config,
simple_scaling_policy_config=None,
ec2_scheduled_shutdown=None
),
'dependencies': []
}],
database_units=[{'unit_title': 'db1',
'database_config': DatabaseConfig(
db_instance_type=db_instance_type,
db_engine=db_engine,
db_port=db_port,
db_hdd_size=db_hdd_size,
db_snapshot_id=None,
db_name='MyDb',
db_backup_window=db_backup_window,
db_backup_retention=db_backup_retention,
db_maintenance_window=db_maintenance_window,
db_storage_type=db_storage_type
)
}
],
cf_distribution_units=[{'unit_title': 'cfdist1',
'cf_origins_config': [
CFOriginsConfig(
domain_name='amazonia-elb-bucket.s3.amazonaws.com',
origin_id='S3-amazonia-elb-bucket',
origin_path='',
custom_headers={
'Origin': 'http://www.domain.com',
'Accept': 'True'
},
origin_policy={
'is_s3': True,
'origin_access_identity': 'originaccessid1'
}
),
CFOriginsConfig(
domain_name='app1',
origin_id='www-elb',
origin_path='/path',
custom_headers={},
origin_policy={
'is_s3': False,
'origin_protocol_policy': 'https-only',
'http_port': 80,
'https_port': 443,
'origin_ssl_protocols': ['TLSv1', 'TLSv1.1', 'TLSv1.2'],
}
),
CFOriginsConfig(
domain_name='validYamlTestAPIGW',
origin_id='www-elb2',
origin_path='/path',
custom_headers={},
origin_policy={
'is_s3': False,
'origin_protocol_policy': 'https-only',
'http_port': 80,
'https_port': 443,
'origin_ssl_protocols': ['TLSv1', 'TLSv1.1', 'TLSv1.2'],
}
)
],
'cf_distribution_config': CFDistributionConfig(
aliases=['www.test-stack.gadevs.ga', 'test-stack.gadevs.ga'],
comment='SysTestCFDistribution',
default_root_object='index.html',
enabled=True,
price_class='PriceClass_All',
error_page_path='index.html',
acm_cert_arn='arn.acm.certificate',
minimum_protocol_version='TLSv1',
ssl_support_method='sni-only'
),
'cf_cache_behavior_config': [
CFCacheBehaviorConfig(
is_default=True,
path_pattern='/index.html',
allowed_methods=['GET', 'HEAD'],
cached_methods=['GET', 'HEAD'],
target_origin_id='S3-bucket-id',
forward_cookies='all',
forwarded_headers=['Accept', 'Set-Cookie'],
viewer_protocol_policy='allow-all',
min_ttl=0,
default_ttl=0,
max_ttl=0,
trusted_signers=['self'],
query_string='False'
),
CFCacheBehaviorConfig(
is_default=False,
path_pattern='/login.js',
allowed_methods=['GET', 'POST', 'HEAD', 'DELETE', 'OPTIONS', 'PATCH', 'PUT'],
cached_methods=['GET', 'HEAD'],
target_origin_id='www-origin',
forward_cookies='all',
forwarded_headers=['Accept', 'Set-Cookie'],
viewer_protocol_policy='https-only',
min_ttl=0,
default_ttl=0,
max_ttl=0,
trusted_signers=['self'],
query_string='True'
)
]
}],
api_gateway_units=[{'unit_title': 'validYamlTestAPIGW',
'method_config': [
ApiGatewayMethodConfig(
method_name='login',
lambda_unit='validYamlTestLambda',
httpmethod='POST',
authorizationtype='NONE',
request_config=ApiGatewayRequestConfig(
templates={'application/json': ''},
parameters={'somemapping': 'somefield'}
),
response_config=[
ApiGatewayResponseConfig(
templates={'application/json': ''},
parameters={'somemapping': 'somefield'},
statuscode='200',
models={'application/json': 'Empty'},
selectionpattern=''
)]
)
]
}],
lambda_units=[{'unit_title': 'validYamlTestLambda',
'dependencies': ['db1:5432'],
'lambda_config': LambdaConfig(
lambda_s3_bucket='bucket_name',
lambda_s3_key='key_name',
lambda_description='blah',
lambda_function_name='my_function',
lambda_handler='main',
lambda_memory_size=128,
lambda_role_arn='test_arn',
lambda_runtime='python2.7',
lambda_timeout=1,
lambda_schedule='cron(0/5 * * * ? *)'
)
}
]
)
return stack
| bsd-3-clause | -6,543,121,254,940,597,000 | 49.600372 | 119 | 0.455975 | false |
joopert/home-assistant | tests/components/fan/test_init.py | 4 | 1116 | """Tests for fan platforms."""
import unittest
from homeassistant.components.fan import FanEntity
import pytest
class BaseFan(FanEntity):
"""Implementation of the abstract FanEntity."""
def __init__(self):
"""Initialize the fan."""
pass
class TestFanEntity(unittest.TestCase):
"""Test coverage for base fan entity class."""
def setUp(self):
"""Set up test data."""
self.fan = BaseFan()
def tearDown(self):
"""Tear down unit test data."""
self.fan = None
def test_fanentity(self):
"""Test fan entity methods."""
assert "off" == self.fan.state
assert 0 == len(self.fan.speed_list)
assert 0 == self.fan.supported_features
assert {"speed_list": []} == self.fan.state_attributes
# Test set_speed not required
self.fan.oscillate(True)
with pytest.raises(NotImplementedError):
self.fan.set_speed("slow")
with pytest.raises(NotImplementedError):
self.fan.turn_on()
with pytest.raises(NotImplementedError):
self.fan.turn_off()
| apache-2.0 | 2,273,334,755,325,178,600 | 26.219512 | 62 | 0.609319 | false |
cpina/science-cruise-data-management | ScienceCruiseDataManagement/main/management/commands/importprojects.py | 1 | 1792 | from django.core.management.base import BaseCommand, CommandError
from main.models import Project, Person
import csv
# This file is part of https://github.com/cpina/science-cruise-data-management
#
# This project was programmed in a hurry without any prior Django experience,
# while circumnavigating the Antarctic on the ACE expedition, without proper
# Internet access, with 150 scientists using the system and doing at the same
# cruise other data management and system administration tasks.
#
# Sadly there aren't unit tests and we didn't have time to refactor the code
# during the cruise, which is really needed.
#
# Carles Pina ([email protected]) and Jen Thomas ([email protected]), 2016-2017.
class Command(BaseCommand):
help = 'Adds data to the project table'
def add_arguments(self, parser):
parser.add_argument('filename', type=str)
def handle(self, *args, **options):
print(options['filename'])
self.import_data_from_csv(options['filename'])
def import_data_from_csv(self, filename):
with open(filename) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
print(row)
project = Project()
project.number = row['project_number']
project.title= row['project_title']
project.alternative_title = row['project_alternative_title']
project.abstract = row['abstract']
if row['name_first'] != '':
print("{}-{}".format(row['name_first'],row['name_last']))
person = Person.objects.filter(name_first=row['name_first']).filter(name_last=row['name_last'])[0]
project.principal_investigator = person
project.save()
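# Illustrative note, not part of the original command: given the row keys
# read above, the input CSV is expected to provide at least these columns:
# project_number,project_title,project_alternative_title,abstract,
# name_first,name_last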
| mit | 2,968,487,484,177,550,300 | 39.727273 | 118 | 0.647321 | false |
havt/odoo | addons/account/wizard/account_use_model.py | 341 | 3361 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_use_model(osv.osv_memory):
_name = 'account.use.model'
_description = 'Use model'
_columns = {
'model': fields.many2many('account.model', 'account_use_model_relation', 'account_id', 'model_id', 'Account Model'),
}
def view_init(self, cr, uid, fields_list, context=None):
account_model_obj = self.pool.get('account.model')
if context is None:
context = {}
if context.get('active_ids',False):
data_model = account_model_obj.browse(cr, uid, context['active_ids'])
for model in data_model:
for line in model.lines_id:
if line.date_maturity == 'partner':
if not line.partner_id:
raise osv.except_osv(_('Error!'), _("Maturity date of entry line generated by model line '%s' is based on partner payment term!"\
"\nPlease define partner on it!")%line.name)
pass
def create_entries(self, cr, uid, ids, context=None):
account_model_obj = self.pool.get('account.model')
mod_obj = self.pool.get('ir.model.data')
if context is None:
context = {}
data = self.read(cr, uid, ids, context=context)[0]
record_id = context and context.get('model_line', False) or False
if record_id:
model_ids = data['model']
else:
model_ids = context['active_ids']
move_ids = account_model_obj.generate(cr, uid, model_ids, context=context)
context = dict(context, move_ids=move_ids)
model_data_ids = mod_obj.search(cr, uid,[('model','=','ir.ui.view'),('name','=','view_move_form')], context=context)
resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
return {
'domain': "[('id','in', ["+','.join(map(str,context['move_ids']))+"])]",
'name': 'Entries',
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'account.move',
'views': [(False,'tree'),(resource_id,'form')],
'type': 'ir.actions.act_window',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 6,215,197,671,965,624,000 | 43.813333 | 157 | 0.565903 | false |
dannyboi104/SickRage | lib/mako/ast.py | 60 | 6702 | # mako/ast.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""utilities for analyzing expressions and blocks of Python
code, as well as generating Python from AST nodes"""
from mako import exceptions, pyparser, compat
import re
class PythonCode(object):
"""represents information about a string containing Python code"""
def __init__(self, code, **exception_kwargs):
self.code = code
# represents all identifiers which are assigned to at some point in
# the code
self.declared_identifiers = set()
# represents all identifiers which are referenced before their
# assignment, if any
self.undeclared_identifiers = set()
# note that an identifier can be in both the undeclared and declared
# lists.
# using AST to parse instead of using code.co_varnames,
# code.co_names has several advantages:
# - we can locate an identifier as "undeclared" even if
# its declared later in the same block of code
# - AST is less likely to break with version changes
# (for example, the behavior of co_names changed a little bit
# in python version 2.5)
if isinstance(code, compat.string_types):
expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
else:
expr = code
f = pyparser.FindIdentifiers(self, **exception_kwargs)
f.visit(expr)
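# Illustrative example, not part of the original module (the exception
# kwargs shown are hypothetical placeholders): analyzing a simple
# assignment such as
#   pc = PythonCode("x = y + 1", source='', lineno=1, pos=1, filename='')
# leaves pc.declared_identifiers == {'x'} and
# pc.undeclared_identifiers == {'y'}, since 'y' is read before any
# assignment in the block.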
class ArgumentList(object):
"""parses a fragment of code as a comma-separated list of expressions"""
def __init__(self, code, **exception_kwargs):
self.codeargs = []
self.args = []
self.declared_identifiers = set()
self.undeclared_identifiers = set()
if isinstance(code, compat.string_types):
if re.match(r"\S", code) and not re.match(r",\s*$", code):
# if there's text and no trailing comma, ensure it's parsed
# as a tuple by adding a trailing comma
code += ","
expr = pyparser.parse(code, "exec", **exception_kwargs)
else:
expr = code
f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
f.visit(expr)
class PythonFragment(PythonCode):
"""extends PythonCode to provide identifier lookups in partial control
statements
e.g.
for x in 5:
elif y==9:
except (MyException, e):
etc.
"""
def __init__(self, code, **exception_kwargs):
m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
if not m:
raise exceptions.CompileException(
"Fragment '%s' is not a partial control statement" %
code, **exception_kwargs)
if m.group(3):
code = code[:m.start(3)]
(keyword, expr) = m.group(1,2)
if keyword in ['for','if', 'while']:
code = code + "pass"
elif keyword == 'try':
code = code + "pass\nexcept:pass"
elif keyword == 'elif' or keyword == 'else':
code = "if False:pass\n" + code + "pass"
elif keyword == 'except':
code = "try:pass\n" + code + "pass"
elif keyword == 'with':
code = code + "pass"
else:
raise exceptions.CompileException(
"Unsupported control keyword: '%s'" %
keyword, **exception_kwargs)
super(PythonFragment, self).__init__(code, **exception_kwargs)
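# Illustrative example, not part of the original module (exception_kwargs
# is a hypothetical placeholder): a fragment such as
#   PythonFragment("for item in collection:", **exception_kwargs)
# is padded to "for item in collection:pass" before parsing, so 'item'
# ends up in declared_identifiers and 'collection' in
# undeclared_identifiers.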
class FunctionDecl(object):
"""function declaration"""
def __init__(self, code, allow_kwargs=True, **exception_kwargs):
self.code = code
expr = pyparser.parse(code, "exec", **exception_kwargs)
f = pyparser.ParseFunc(self, **exception_kwargs)
f.visit(expr)
if not hasattr(self, 'funcname'):
raise exceptions.CompileException(
"Code '%s' is not a function declaration" % code,
**exception_kwargs)
if not allow_kwargs and self.kwargs:
raise exceptions.CompileException(
"'**%s' keyword argument not allowed here" %
self.kwargnames[-1], **exception_kwargs)
def get_argument_expressions(self, as_call=False):
"""Return the argument declarations of this FunctionDecl as a printable
list.
By default the return value is appropriate for writing in a ``def``;
set `as_call` to true to build arguments to be passed to the function
instead (assuming locals with the same names as the arguments exist).
"""
namedecls = []
# Build in reverse order, since defaults and slurpy args come last
argnames = self.argnames[::-1]
kwargnames = self.kwargnames[::-1]
defaults = self.defaults[::-1]
kwdefaults = self.kwdefaults[::-1]
# Named arguments
if self.kwargs:
namedecls.append("**" + kwargnames.pop(0))
for name in kwargnames:
# Keyword-only arguments must always be used by name, so even if
# this is a call, print out `foo=foo`
if as_call:
namedecls.append("%s=%s" % (name, name))
elif kwdefaults:
default = kwdefaults.pop(0)
if default is None:
# The AST always gives kwargs a default, since you can do
# `def foo(*, a=1, b, c=3)`
namedecls.append(name)
else:
namedecls.append("%s=%s" % (
name, pyparser.ExpressionGenerator(default).value()))
else:
namedecls.append(name)
# Positional arguments
if self.varargs:
namedecls.append("*" + argnames.pop(0))
for name in argnames:
if as_call or not defaults:
namedecls.append(name)
else:
default = defaults.pop(0)
namedecls.append("%s=%s" % (
name, pyparser.ExpressionGenerator(default).value()))
namedecls.reverse()
return namedecls
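# Illustrative example, not part of the original module:
#   FunctionArgs("x, y=5, *args, **kw").get_argument_expressions()
# returns ['x', 'y=5', '*args', '**kw'], while as_call=True drops the
# default and yields ['x', 'y', '*args', '**kw'].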
@property
def allargnames(self):
return tuple(self.argnames) + tuple(self.kwargnames)
class FunctionArgs(FunctionDecl):
"""the argument portion of a function declaration"""
def __init__(self, code, **kwargs):
super(FunctionArgs, self).__init__("def ANON(%s):pass" % code,
**kwargs)
| gpl-3.0 | -7,948,000,596,961,553,000 | 36.651685 | 79 | 0.565801 | false |
joopert/home-assistant | tests/components/input_datetime/test_reproduce_state.py | 5 | 2723 | """Test reproduce state for Input datetime."""
from homeassistant.core import State
from tests.common import async_mock_service
async def test_reproducing_states(hass, caplog):
"""Test reproducing Input datetime states."""
hass.states.async_set(
"input_datetime.entity_datetime",
"2010-10-10 01:20:00",
{"has_date": True, "has_time": True},
)
hass.states.async_set(
"input_datetime.entity_time", "01:20:00", {"has_date": False, "has_time": True}
)
hass.states.async_set(
"input_datetime.entity_date",
"2010-10-10",
{"has_date": True, "has_time": False},
)
datetime_calls = async_mock_service(hass, "input_datetime", "set_datetime")
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State("input_datetime.entity_datetime", "2010-10-10 01:20:00"),
State("input_datetime.entity_time", "01:20:00"),
State("input_datetime.entity_date", "2010-10-10"),
],
blocking=True,
)
assert len(datetime_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state(
[
State("input_datetime.entity_datetime", "not_supported"),
State("input_datetime.entity_datetime", "not-valid-date"),
State("input_datetime.entity_datetime", "not:valid:time"),
State("input_datetime.entity_datetime", "1234-56-78 90:12:34"),
],
blocking=True,
)
assert "not_supported" in caplog.text
assert "not-valid-date" in caplog.text
assert "not:valid:time" in caplog.text
assert "1234-56-78 90:12:34" in caplog.text
assert len(datetime_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State("input_datetime.entity_datetime", "2011-10-10 02:20:00"),
State("input_datetime.entity_time", "02:20:00"),
State("input_datetime.entity_date", "2011-10-10"),
# Should not raise
State("input_datetime.non_existing", "2010-10-10 01:20:00"),
],
blocking=True,
)
valid_calls = [
{
"entity_id": "input_datetime.entity_datetime",
"datetime": "2011-10-10 02:20:00",
},
{"entity_id": "input_datetime.entity_time", "time": "02:20:00"},
{"entity_id": "input_datetime.entity_date", "date": "2011-10-10"},
]
assert len(datetime_calls) == 3
for call in datetime_calls:
assert call.domain == "input_datetime"
assert call.data in valid_calls
valid_calls.remove(call.data)
| apache-2.0 | -1,225,122,979,564,478,200 | 33.910256 | 87 | 0.600441 | false |
0jpq0/kbengine | kbe/res/scripts/common/Lib/idlelib/OutputWindow.py | 88 | 4394 | from tkinter import *
from idlelib.EditorWindow import EditorWindow
import re
import tkinter.messagebox as tkMessageBox
from idlelib import IOBinding
class OutputWindow(EditorWindow):
"""An editor window that can serve as an output file.
Also the future base class for the Python shell window.
This class has no input facilities.
"""
def __init__(self, *args):
EditorWindow.__init__(self, *args)
self.text.bind("<<goto-file-line>>", self.goto_file_line)
# Customize EditorWindow
def ispythonsource(self, filename):
# No colorization needed
return 0
def short_title(self):
return "Output"
def maybesave(self):
# Override base class method -- don't ask any questions
if self.get_saved():
return "yes"
else:
return "no"
# Act as output file
def write(self, s, tags=(), mark="insert"):
if isinstance(s, bytes):
s = s.decode(IOBinding.encoding, "replace")
self.text.insert(mark, s, tags)
self.text.see(mark)
self.text.update()
return len(s)
def writelines(self, lines):
for line in lines:
self.write(line)
def flush(self):
pass
# Our own right-button menu
rmenu_specs = [
("Cut", "<<cut>>", "rmenu_check_cut"),
("Copy", "<<copy>>", "rmenu_check_copy"),
("Paste", "<<paste>>", "rmenu_check_paste"),
(None, None, None),
("Go to file/line", "<<goto-file-line>>", None),
]
file_line_pats = [
# order of patterns matters
r'file "([^"]*)", line (\d+)',
r'([^\s]+)\((\d+)\)',
r'^(\s*\S.*?):\s*(\d+):', # Win filename, maybe starting with spaces
r'([^\s]+):\s*(\d+):', # filename or path, ltrim
r'^\s*(\S.*?):\s*(\d+):', # Win abs path with embedded spaces, ltrim
]
file_line_progs = None
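# Illustrative examples, not part of the original module, of lines the
# patterns above recognize (each yields a (filename, lineno) pair):
#   File "spam.py", line 3 -> ('spam.py', '3') via the traceback pattern
#   spam.py(3) -> ('spam.py', '3')
#   C:\Python\spam.py: 3: ... -> ('C:\Python\spam.py', '3')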
def goto_file_line(self, event=None):
if self.file_line_progs is None:
l = []
for pat in self.file_line_pats:
l.append(re.compile(pat, re.IGNORECASE))
self.file_line_progs = l
# x, y = self.event.x, self.event.y
# self.text.mark_set("insert", "@%d,%d" % (x, y))
line = self.text.get("insert linestart", "insert lineend")
result = self._file_line_helper(line)
if not result:
# Try the previous line. This is handy e.g. in tracebacks,
# where you tend to right-click on the displayed source line
line = self.text.get("insert -1line linestart",
"insert -1line lineend")
result = self._file_line_helper(line)
if not result:
tkMessageBox.showerror(
"No special line",
"The line you point at doesn't look like "
"a valid file name followed by a line number.",
master=self.text)
return
filename, lineno = result
edit = self.flist.open(filename)
edit.gotoline(lineno)
def _file_line_helper(self, line):
for prog in self.file_line_progs:
match = prog.search(line)
if match:
filename, lineno = match.group(1, 2)
try:
f = open(filename, "r")
f.close()
break
except OSError:
continue
else:
return None
try:
return filename, int(lineno)
except TypeError:
return None
# These classes are currently not used but might come in handy
class OnDemandOutputWindow:
tagdefs = {
# XXX Should use IdlePrefs.ColorPrefs
"stdout": {"foreground": "blue"},
"stderr": {"foreground": "#007700"},
}
def __init__(self, flist):
self.flist = flist
self.owin = None
def write(self, s, tags, mark):
if not self.owin:
self.setup()
self.owin.write(s, tags, mark)
def setup(self):
self.owin = owin = OutputWindow(self.flist)
text = owin.text
for tag, cnf in self.tagdefs.items():
if cnf:
text.tag_configure(tag, **cnf)
text.tag_raise('sel')
self.write = self.owin.write
| lgpl-3.0 | 687,982,925,328,611,700 | 29.513889 | 77 | 0.528903 | false |
mageec/mageec | script/generate-builds.py | 1 | 12030 | #!/usr/bin/env python3
import argparse
import os
import random
import sys
import mageec
gcc_flags = [
#'-faggressive-loop-optimizations', # Not supported in 4.5
'-falign-functions',
'-falign-jumps',
'-falign-labels',
'-falign-loops',
'-fbranch-count-reg',
'-fbranch-target-load-optimize',
'-fbranch-target-load-optimize2',
'-fbtr-bb-exclusive',
'-fcaller-saves',
#'-fcombine-stack-adjustments', # Not supported in 4.5
#'-fcommon', # affects semantics, unlikely to affect performance
#'-fcompare-elim', # Not supported in 4.5
'-fconserve-stack',
'-fcprop-registers',
'-fcrossjumping',
'-fcse-follow-jumps',
#'-fdata-sections', # affects semantics, unlikely to affect performance
'-fdce',
'-fdefer-pop',
'-fdelete-null-pointer-checks',
#'-fdevirtualize', # Not supported in 4.5
'-fdse',
'-fearly-inlining',
'-fexpensive-optimizations',
'-fforward-propagate',
'-fgcse',
'-fgcse-after-reload',
'-fgcse-las',
'-fgcse-lm',
'-fgcse-sm',
'-fguess-branch-probability',
#'-fhoist-adjacent-loads', # Not supported in 4.5
'-fif-conversion',
'-fif-conversion2',
'-finline',
#'-finline-atomics', # Not supported in 4.5
'-finline-functions',
'-finline-functions-called-once',
'-finline-small-functions',
'-fipa-cp',
'-fipa-cp-clone',
#'-fipa-profile', # Not supported in 4.5
'-fipa-pta',
'-fipa-pure-const',
'-fipa-reference',
'-fipa-sra',
#'-fira-hoist-pressure', # Not supported in 4.5
'-fivopts',
'-fmerge-constants',
'-fmodulo-sched',
'-fmove-loop-invariants',
'-fomit-frame-pointer',
'-foptimize-sibling-calls',
#'-foptimize-strlen', # Not supported in 4.5
'-fpeephole',
'-fpeephole2',
'-fpredictive-commoning',
'-fprefetch-loop-arrays',
'-fregmove',
'-frename-registers',
'-freorder-blocks',
'-freorder-functions',
'-frerun-cse-after-loop',
'-freschedule-modulo-scheduled-loops',
'-fsched-critical-path-heuristic',
'-fsched-dep-count-heuristic',
'-fsched-group-heuristic',
'-fsched-interblock',
'-fsched-last-insn-heuristic',
'-fsched-pressure',
'-fsched-rank-heuristic',
'-fsched-spec',
'-fsched-spec-insn-heuristic',
'-fsched-spec-load',
'-fsched-stalled-insns',
'-fsched-stalled-insns-dep',
'-fschedule-insns',
'-fschedule-insns2',
#'-fsection-anchors', # may conflict with other flags
'-fsel-sched-pipelining',
'-fsel-sched-pipelining-outer-loops',
'-fsel-sched-reschedule-pipelined',
'-fselective-scheduling',
'-fselective-scheduling2',
#'-fshrink-wrap', # Not supported in 4.5
'-fsplit-ivs-in-unroller',
'-fsplit-wide-types',
#'-fstrict-aliasing', # affects semantics
'-fthread-jumps',
'-ftoplevel-reorder',
#'-ftree-bit-ccp', # Not supported in 4.5
'-ftree-builtin-call-dce',
'-ftree-ccp',
'-ftree-ch',
#'-ftree-coalesce-inlined-vars', # No equivalent -fno for this flag
#'-ftree-coalesce-vars', # Not supported in 4.5
'-ftree-copy-prop',
'-ftree-copyrename',
'-ftree-cselim',
'-ftree-dce',
'-ftree-dominator-opts',
'-ftree-dse',
'-ftree-forwprop',
'-ftree-fre',
#'-ftree-loop-distribute-patterns', # Not supported in 4.5
'-ftree-loop-distribution',
#'-ftree-loop-if-convert', # Not supported in 4.5
'-ftree-loop-im',
'-ftree-loop-ivcanon',
'-ftree-loop-optimize',
#'-ftree-partial-pre', # Not supported in 4.5
'-ftree-phiprop',
'-ftree-pre',
'-ftree-pta',
'-ftree-reassoc',
'-ftree-scev-cprop',
'-ftree-sink',
'-ftree-slp-vectorize',
#'-ftree-slsr', # Not supported in 4.5
'-ftree-sra',
'-ftree-switch-conversion',
#'-ftree-tail-merge', # Not supported in 4.5
'-ftree-ter',
'-ftree-vect-loop-version',
'-ftree-vectorize',
'-ftree-vrp',
'-funroll-all-loops',
'-funroll-loops',
'-funswitch-loops',
'-fvariable-expansion-in-unroller',
'-fvect-cost-model',
'-fweb'
]
# TODO: make this generic over the type of choice that needs to be made
def generate_configs(flags, num_configs, generator):
configs = []
if generator == 'random':
for i in range(0, num_configs):
num_enabled = random.randint(0, len(flags))
flag_seq = random.sample(flags, num_enabled)
configs.append(' '.join(flag_seq))
else:
assert False, 'Unsupported configuration generator'
return configs
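# Illustrative note, not part of the original script: with
# generator='random', each returned config is a space-joined random
# subset of the candidate flags, e.g.
#   '-falign-loops -fgcse -ftree-vectorize'
# and num_configs such strings are produced.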
def generate_configurations(src_dir, build_dir, install_dir, build_system,
cc, cxx, fort, flags, jobs, database_path,
features_path, num_configs, generator, debug):
assert(os.path.exists(src_dir) and os.path.isabs(src_dir))
assert(os.path.exists(build_dir) and os.path.isabs(build_dir))
assert(os.path.exists(install_dir) and os.path.isabs(install_dir))
assert(os.path.exists(database_path))
assert(os.path.exists(features_path))
assert(mageec.is_command_on_path(cc))
assert(mageec.is_command_on_path(cxx))
assert(mageec.is_command_on_path(fort))
assert(num_configs > 0)
assert(jobs > 0)
configs = generate_configs(gcc_flags, num_configs, generator)
run_id = 0
for config in configs:
run_build_dir = os.path.join(build_dir, 'run-' + str(run_id))
run_install_dir = os.path.join(install_dir, 'run-' + str(run_id))
if not os.path.exists(run_build_dir):
os.makedirs(run_build_dir)
if not os.path.exists(run_install_dir):
os.makedirs(run_install_dir)
run_id += 1
print ('-- Building configuration:\n'
' Configuration: \'' + config + '\'')
compilations_path = os.path.join(run_install_dir, 'compilations.csv')
cc_wrapper = 'mageec-' + cc
cxx_wrapper = 'mageec-' + cxx
fort_wrapper = 'mageec-' + fort
assert(mageec.is_command_on_path(cc_wrapper))
assert(mageec.is_command_on_path(cxx_wrapper))
assert(mageec.is_command_on_path(fort_wrapper))
wrapper_flags = ""
if debug:
wrapper_flags += ' -fmageec-debug'
wrapper_flags += ' -fmageec-mode=gather'
wrapper_flags += ' -fmageec-database=' + database_path
wrapper_flags += ' -fmageec-features=' + features_path
wrapper_flags += ' -fmageec-out=' + compilations_path
new_flags = wrapper_flags + ' ' + flags + ' ' + config
res = mageec.build(src_dir=src_dir,
build_dir=run_build_dir,
install_dir=run_install_dir,
build_system=build_system,
cc=cc_wrapper,
cxx=cxx_wrapper,
fort=fort_wrapper,
flags=new_flags)
# just ignore failed builds
if not res:
print ('-- Build failed. Continuing regardless')
return True
def main():
parser = argparse.ArgumentParser(
description='Generate and build multiple versions of a source project')
# required arguments
parser.add_argument('--src-dir', nargs=1, required=True,
help='Directory containing the source to build')
parser.add_argument('--build-dir', nargs=1, required=True,
help='Build directory')
parser.add_argument('--install-dir', nargs=1, required=True,
help='Install directory')
parser.add_argument('--cc', nargs=1, required=True,
help='Command to use to compile C source')
parser.add_argument('--cxx', nargs=1, required=True,
help='Command to use to compile C++ source')
parser.add_argument('--fort', nargs=1, required=True,
help='Command to use to compile Fortran source')
parser.add_argument('--database', nargs=1, required=True,
help='mageec database to store generated compilations in')
parser.add_argument('--features', nargs=1, required=True,
help='File containing extracted features for the source being built')
parser.add_argument('--num-configs', nargs=1, required=True,
help='Number of configurations of the source to generate')
parser.add_argument('--generator', nargs=1, required=True,
help='Generator to use to generate configurations')
# optional arguments
parser.add_argument('--debug', action='store_true', required=False,
help='Enable debug when doing feature extraction')
parser.add_argument('--build-system', nargs=1, required=False,
help='Build system to be used to build the source. May be \'cmake\', '
'\'configure\', or a script to be used to build the source')
parser.add_argument('--flags', nargs=1, required=False,
help='Common arguments to be used when building')
parser.add_argument('--jobs', nargs=1, required=False,
help='Number of jobs to run when building')
parser.set_defaults(debug=False,
build_system=[None],
flags=[''],
jobs=[1])
args = parser.parse_args(sys.argv[1:])
src_dir = os.path.abspath(args.src_dir[0])
build_dir = os.path.abspath(args.build_dir[0])
install_dir = os.path.abspath(args.install_dir[0])
cc = args.cc[0]
cxx = args.cxx[0]
fort = args.fort[0]
database_path = os.path.abspath(args.database[0])
features_path = os.path.abspath(args.features[0])
num_configs = int(args.num_configs[0])
generator = args.generator[0]
if not os.path.exists(src_dir):
print ('-- Source directory \'' + src_dir + '\' does not exist')
return -1
if not os.path.exists(build_dir):
os.makedirs(build_dir)
if not os.path.exists(install_dir):
os.makedirs(install_dir)
if not os.path.exists(database_path):
print ('-- Database \'' + database_path + '\' does not exist')
return -1
if not os.path.exists(features_path):
print ('-- Features file \'' + features_path + '\' does not exist')
return -1
if not mageec.is_command_on_path(cc):
print ('-- Compiler \'' + cc + '\' is not on the path')
return -1
if not mageec.is_command_on_path(cxx):
print ('-- Compiler \'' + cxx + '\' is not on the path')
return -1
if not mageec.is_command_on_path(fort):
print ('-- Compiler \'' + fort + '\' is not on the path')
return -1
if num_configs <= 0:
print ('-- Cannot generate a negative or zero number of configurations')
return -1
debug = args.debug
build_system = args.build_system[0]
flags = args.flags[0]
jobs = int(args.jobs[0])
if jobs < 1:
print ('-- Number of jobs must be a positive integer')
return -1
res = generate_configurations(src_dir=src_dir,
build_dir=build_dir,
install_dir=install_dir,
build_system=build_system,
cc=cc,
cxx=cxx,
fort=fort,
flags=flags,
jobs=jobs,
database_path=database_path,
features_path=features_path,
num_configs=num_configs,
generator=generator,
debug=debug)
if not res:
return -1
return 0
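# Example invocation (script name, paths and generator are hypothetical):
#
#   python generate_builds.py --src-dir ./bzip2 --build-dir ./build \
#       --install-dir ./install --cc gcc --cxx g++ --fort gfortran \
#       --database ./mageec.db --features ./features.out \
#       --num-configs 100 --generator random --jobs 4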
if __name__ == '__main__':
    sys.exit(main())
| gpl-3.0 | 8,509,774,038,920,799,000 | 35.56535 | 91 | 0.566085 | false |
cstipkovic/spidermonkey-research | testing/talos/talos/cmanager_mac.py | 2 | 2801 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""CounterManager for Mac OSX"""
import subprocess
from cmanager import CounterManager
import sys
def GetProcessData(pid):
"""Runs a ps on the process identified by pid and returns the output line
as a list (pid, vsz, rss)
"""
command = ['ps -o pid,vsize,rss -p'+str(pid)]
try:
handle = subprocess.Popen(command, stdout=subprocess.PIPE,
universal_newlines=True, shell=True)
handle.wait()
data = handle.stdout.readlines()
except:
print("Unexpected error executing '%s': %s", (command, sys.exc_info()))
raise
    # First line is the header; output should look like:
# PID VSZ RSS
# 3210 75964 920
line = data[1]
line = line.split()
if line[0] == str(pid):
return line
def GetPrivateBytes(pid):
"""Calculate the amount of private, writeable memory allocated to a
process.
"""
psData = GetProcessData(pid)
return int(psData[1]) * 1024 # convert to bytes
def GetResidentSize(pid):
"""Retrieve the current resident memory for a given process"""
psData = GetProcessData(pid)
return int(psData[2]) * 1024 # convert to bytes
class MacCounterManager(CounterManager):
"""This class manages the monitoring of a process with any number of
counters.
A counter can be any function that takes an argument of one pid and
returns a piece of data about that process.
Some examples are: CalcCPUTime, GetResidentSize, and GetPrivateBytes
"""
counterDict = {"Private Bytes": GetPrivateBytes,
"RSS": GetResidentSize}
def __init__(self, process_name, process, counters):
"""Args:
counters: A list of counters to monitor. Any counters whose name
does not match a key in 'counterDict' will be ignored.
"""
CounterManager.__init__(self)
# the last process is the useful one
self.pid = process.pid
self._loadCounters()
self.registerCounters(counters)
def getCounterValue(self, counterName):
"""Returns the last value of the counter 'counterName'"""
if counterName not in self.registeredCounters:
print("Warning: attempting to collect counter %s and it is not"
" registered" % counterName)
return
try:
return self.registeredCounters[counterName][0](self.pid)
except Exception as e:
print("Error in collecting counter: %s, pid: %s, exception: %s"
% (counterName, self.pid, e))
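# Minimal usage sketch (illustrative; assumes a live process handle with a
# .pid attribute, e.g. from subprocess.Popen, and counter names taken from
# counterDict):
#
#   import subprocess
#   proc = subprocess.Popen(['sleep', '60'])
#   mgr = MacCounterManager('sleep', proc, ['RSS', 'Private Bytes'])
#   print(mgr.getCounterValue('RSS'))            # resident size in bytes
#   print(mgr.getCounterValue('Private Bytes'))  # private writeable memory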
| mpl-2.0 | -6,439,433,337,595,770,000 | 31.569767 | 79 | 0.62799 | false |
thaim/ansible | lib/ansible/plugins/action/cnos.py | 38 | 3535 | # (C) 2017 Red Hat Inc.
# Copyright (C) 2017 Lenovo.
#
# GNU General Public License v3.0+
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Contains Action Plugin methods for CNOS Config Module
# Lenovo Networking
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
from ansible import constants as C
from ansible.plugins.action.network import ActionModule as ActionNetworkModule
from ansible.module_utils.network.cnos.cnos import cnos_provider_spec
from ansible.module_utils.network.common.utils import load_provider
from ansible.module_utils.connection import Connection
from ansible.module_utils._text import to_text
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionNetworkModule):
def run(self, tmp=None, task_vars=None):
del tmp # tmp no longer has any effect
        self._config_module = self._task.action == 'cnos_config'
socket_path = None
if self._play_context.connection == 'local':
provider = load_provider(cnos_provider_spec, self._task.args)
pc = copy.deepcopy(self._play_context)
pc.connection = 'network_cli'
pc.network_os = 'cnos'
pc.remote_addr = provider['host'] or self._play_context.remote_addr
pc.port = provider['port'] or self._play_context.port or 22
pc.remote_user = provider['username'] or self._play_context.connection_user
pc.password = provider['password'] or self._play_context.password
pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
command_timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)
pc.become = provider['authorize'] or True
pc.become_pass = provider['auth_pass']
pc.become_method = 'enable'
display.vvv('using connection plugin %s (was local)' % pc.connection, pc.remote_addr)
connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
connection.set_options(direct={'persistent_command_timeout': command_timeout})
socket_path = connection.run()
display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
if not socket_path:
return {'failed': True,
'msg': 'unable to open shell. Please see: ' +
'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}
task_vars['ansible_socket'] = socket_path
# make sure we are in the right cli context which should be
# enable mode and not config module or exec mode
if socket_path is None:
socket_path = self._connection.socket_path
conn = Connection(socket_path)
out = conn.get_prompt()
if to_text(out, errors='surrogate_then_replace').strip().endswith(')#'):
display.vvvv('In Config mode, sending exit to device', self._play_context.remote_addr)
conn.send_command('exit')
else:
conn.send_command('enable')
result = super(ActionModule, self).run(task_vars=task_vars)
return result
| mit | -7,932,380,170,677,422,000 | 41.083333 | 122 | 0.65884 | false |
jamesblunt/edx-platform | lms/djangoapps/class_dashboard/tests/test_views.py | 133 | 4061 | """
Tests for class dashboard (Metrics tab in instructor dashboard)
"""
import json
from django.test.client import RequestFactory
from mock import patch
from nose.plugins.attrib import attr
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from class_dashboard import views
from student.tests.factories import AdminFactory
@attr('shard_1')
class TestViews(ModuleStoreTestCase):
"""
Tests related to class_dashboard/views.py
"""
def setUp(self):
super(TestViews, self).setUp()
self.request_factory = RequestFactory()
self.request = self.request_factory.get('')
self.request.user = None
self.simple_data = {'error': 'error'}
@patch('class_dashboard.views.has_instructor_access_for_class')
def test_all_problem_grade_distribution_has_access(self, has_access):
"""
Test returns proper value when have proper access
"""
has_access.return_value = True
response = views.all_problem_grade_distribution(self.request, 'test/test/test')
self.assertEqual(json.dumps(self.simple_data), response.content)
@patch('class_dashboard.views.has_instructor_access_for_class')
def test_all_problem_grade_distribution_no_access(self, has_access):
"""
Test for no access
"""
has_access.return_value = False
response = views.all_problem_grade_distribution(self.request, 'test/test/test')
self.assertEqual("{\"error\": \"Access Denied: User does not have access to this course\'s data\"}", response.content)
@patch('class_dashboard.views.has_instructor_access_for_class')
def test_all_sequential_open_distribution_has_access(self, has_access):
"""
Test returns proper value when have proper access
"""
has_access.return_value = True
response = views.all_sequential_open_distrib(self.request, 'test/test/test')
self.assertEqual(json.dumps(self.simple_data), response.content)
@patch('class_dashboard.views.has_instructor_access_for_class')
def test_all_sequential_open_distribution_no_access(self, has_access):
"""
Test for no access
"""
has_access.return_value = False
response = views.all_sequential_open_distrib(self.request, 'test/test/test')
self.assertEqual("{\"error\": \"Access Denied: User does not have access to this course\'s data\"}", response.content)
@patch('class_dashboard.views.has_instructor_access_for_class')
def test_section_problem_grade_distribution_has_access(self, has_access):
"""
Test returns proper value when have proper access
"""
has_access.return_value = True
response = views.section_problem_grade_distrib(self.request, 'test/test/test', '1')
self.assertEqual(json.dumps(self.simple_data), response.content)
@patch('class_dashboard.views.has_instructor_access_for_class')
def test_section_problem_grade_distribution_no_access(self, has_access):
"""
Test for no access
"""
has_access.return_value = False
response = views.section_problem_grade_distrib(self.request, 'test/test/test', '1')
self.assertEqual("{\"error\": \"Access Denied: User does not have access to this course\'s data\"}", response.content)
def test_sending_deprecated_id(self):
course = CourseFactory.create()
instructor = AdminFactory.create()
self.request.user = instructor
response = views.all_sequential_open_distrib(self.request, course.id.to_deprecated_string())
self.assertEqual('[]', response.content)
response = views.all_problem_grade_distribution(self.request, course.id.to_deprecated_string())
self.assertEqual('[]', response.content)
response = views.section_problem_grade_distrib(self.request, course.id.to_deprecated_string(), 'no section')
self.assertEqual('{"error": "error"}', response.content)
| agpl-3.0 | 7,651,137,132,831,873,000 | 38.427184 | 126 | 0.680128 | false |
deroneriksson/incubator-systemml | src/main/python/systemml/random/sampling.py | 7 | 5336 | # -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
__all__ = ['normal', 'uniform', 'poisson']
from ..defmatrix import *
# Special object used internally to specify the placeholder which will be replaced by output ID
# This helps to provide dml containing output ID in constructSamplingNode
OUTPUT_ID = '$$OutputID$$'
def constructSamplingNode(inputs, dml):
"""
Convenient utility to create an intermediate of AST.
Parameters
----------
inputs = list of input matrix objects and/or DMLOp
dml = list of DML string (which will be eventually joined before execution). To specify out.ID, please use the placeholder
"""
dmlOp = DMLOp(inputs)
out = matrix(None, op=dmlOp)
dmlOp.dml = [out.ID if x == OUTPUT_ID else x for x in dml]
return out
INPUTS = []
def asStr(arg):
    """
    Internal use only: Convenient utility to update inputs and return appropriate string value
    """
    # INPUTS is rebound below, so it must be declared global here; otherwise
    # the matrix branch raises UnboundLocalError and the collected matrix
    # inputs never reach constructSamplingNode.
    global INPUTS
    if isinstance(arg, matrix):
        INPUTS = INPUTS + [arg]
        return arg.ID
    else:
        return str(arg)
def normal(loc=0.0, scale=1.0, size=(1, 1), sparsity=1.0):
"""
Draw random samples from a normal (Gaussian) distribution.
Parameters
----------
loc: Mean ("centre") of the distribution.
scale: Standard deviation (spread or "width") of the distribution.
size: Output shape (only tuple of length 2, i.e. (m, n), supported).
sparsity: Sparsity (between 0.0 and 1.0).
Examples
--------
>>> import systemml as sml
>>> import numpy as np
>>> sml.setSparkContext(sc)
>>> from systemml import random
>>> m1 = sml.random.normal(loc=3, scale=2, size=(3,3))
>>> m1.toNumPy()
array([[ 3.48857226, 6.17261819, 2.51167259],
[ 3.60506708, -1.90266305, 3.97601633],
[ 3.62245706, 5.9430881 , 2.53070413]])
"""
if len(size) != 2:
raise TypeError('Incorrect type for size. Expected tuple of length 2')
    global INPUTS
    INPUTS = []
rows = asStr(size[0])
cols = asStr(size[1])
loc = asStr(loc)
scale = asStr(scale)
sparsity = asStr(sparsity)
# loc + scale*standard normal
return constructSamplingNode(INPUTS, [
OUTPUT_ID, ' = ', loc, ' + ', scale, ' * random.normal(', rows, ',', cols, ',', sparsity, ')\n'])
def uniform(low=0.0, high=1.0, size=(1, 1), sparsity=1.0):
"""
Draw samples from a uniform distribution.
Parameters
----------
low: Lower boundary of the output interval.
high: Upper boundary of the output interval.
size: Output shape (only tuple of length 2, i.e. (m, n), supported).
sparsity: Sparsity (between 0.0 and 1.0).
Examples
--------
>>> import systemml as sml
>>> import numpy as np
>>> sml.setSparkContext(sc)
>>> from systemml import random
>>> m1 = sml.random.uniform(size=(3,3))
>>> m1.toNumPy()
array([[ 0.54511396, 0.11937437, 0.72975775],
[ 0.14135946, 0.01944448, 0.52544478],
[ 0.67582422, 0.87068849, 0.02766852]])
"""
if len(size) != 2:
raise TypeError('Incorrect type for size. Expected tuple of length 2')
    global INPUTS
    INPUTS = []
rows = asStr(size[0])
cols = asStr(size[1])
low = asStr(low)
high = asStr(high)
sparsity = asStr(sparsity)
return constructSamplingNode(INPUTS, [
OUTPUT_ID, ' = random.uniform(', rows, ',', cols, ',', sparsity, ',', low, ',', high, ')\n'])
def poisson(lam=1.0, size=(1, 1), sparsity=1.0):
"""
Draw samples from a Poisson distribution.
Parameters
----------
lam: Expectation of interval, should be > 0.
size: Output shape (only tuple of length 2, i.e. (m, n), supported).
sparsity: Sparsity (between 0.0 and 1.0).
Examples
--------
>>> import systemml as sml
>>> import numpy as np
>>> sml.setSparkContext(sc)
>>> from systemml import random
>>> m1 = sml.random.poisson(lam=1, size=(3,3))
>>> m1.toNumPy()
array([[ 1., 0., 2.],
[ 1., 0., 0.],
[ 0., 0., 0.]])
"""
if len(size) != 2:
raise TypeError('Incorrect type for size. Expected tuple of length 2')
    global INPUTS
    INPUTS = []
rows = asStr(size[0])
cols = asStr(size[1])
lam = asStr(lam)
sparsity = asStr(sparsity)
return constructSamplingNode(INPUTS, [
OUTPUT_ID, ' = random.poisson(', rows, ',', cols, ',', sparsity, ',', lam, ')\n'])
| apache-2.0 | 6,646,452,491,699,643,000 | 30.761905 | 130 | 0.593703 | false |
amarant/servo | tests/wpt/web-platform-tests/tools/py/py/_path/cacheutil.py | 278 | 3333 | """
This module contains multithread-safe cache implementations.
All Caches have
getorbuild(key, builder)
delentry(key)
methods and allow configuration when instantiating the cache class.
"""
from time import time as gettime
class BasicCache(object):
def __init__(self, maxentries=128):
self.maxentries = maxentries
self.prunenum = int(maxentries - maxentries/8)
self._dict = {}
def clear(self):
self._dict.clear()
def _getentry(self, key):
return self._dict[key]
def _putentry(self, key, entry):
self._prunelowestweight()
self._dict[key] = entry
def delentry(self, key, raising=False):
try:
del self._dict[key]
except KeyError:
if raising:
raise
def getorbuild(self, key, builder):
try:
entry = self._getentry(key)
except KeyError:
entry = self._build(key, builder)
self._putentry(key, entry)
return entry.value
def _prunelowestweight(self):
""" prune out entries with lowest weight. """
numentries = len(self._dict)
if numentries >= self.maxentries:
# evict according to entry's weight
items = [(entry.weight, key)
for key, entry in self._dict.items()]
items.sort()
index = numentries - self.prunenum
if index > 0:
for weight, key in items[:index]:
# in MT situations the element might be gone
self.delentry(key, raising=False)
class BuildcostAccessCache(BasicCache):
""" A BuildTime/Access-counting cache implementation.
the weight of a value is computed as the product of
num-accesses-of-a-value * time-to-build-the-value
The values with the least such weights are evicted
    if the cache maxentries threshold is exceeded.
For implementation flexibility more than one object
might be evicted at a time.
"""
# time function to use for measuring build-times
def _build(self, key, builder):
start = gettime()
val = builder()
end = gettime()
return WeightedCountingEntry(val, end-start)
class WeightedCountingEntry(object):
def __init__(self, value, oneweight):
self._value = value
self.weight = self._oneweight = oneweight
def value(self):
self.weight += self._oneweight
return self._value
value = property(value)
class AgingCache(BasicCache):
""" This cache prunes out cache entries that are too old.
"""
def __init__(self, maxentries=128, maxseconds=10.0):
super(AgingCache, self).__init__(maxentries)
self.maxseconds = maxseconds
def _getentry(self, key):
entry = self._dict[key]
if entry.isexpired():
self.delentry(key)
raise KeyError(key)
return entry
def _build(self, key, builder):
val = builder()
entry = AgingEntry(val, gettime() + self.maxseconds)
return entry
class AgingEntry(object):
def __init__(self, value, expirationtime):
self.value = value
self.weight = expirationtime
def isexpired(self):
t = gettime()
return t >= self.weight
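# Illustrative use of the caches above (keys and builders are hypothetical):
#
#   cache = BuildcostAccessCache(maxentries=64)
#   cache.getorbuild('answer', lambda: 6 * 7)    # built once, then cached
#   cache.getorbuild('answer', lambda: 6 * 7)    # served from the cache
#
#   aging = AgingCache(maxentries=64, maxseconds=5.0)
#   aging.getorbuild('token', expensive_lookup)  # rebuilt once 5s have passed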
| mpl-2.0 | 1,975,793,741,031,625,200 | 28.236842 | 67 | 0.59796 | false |
SoftwareExperiment4/SungkyunWiki | wiki/plugins/haystack/search_indexes.py | 16 | 1113 | from __future__ import absolute_import
from __future__ import unicode_literals
from haystack import indexes
from wiki import models
class ArticleIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
created = indexes.DateTimeField(model_attr='created')
modified = indexes.DateTimeField(model_attr='modified')
# default because indexing fails with whoosh. see.
# http://stackoverflow.com/questions/11995367/how-do-i-use-a-boolean-field-in-django-haystack-search-query
# https://github.com/toastdriven/django-haystack/issues/382
other_read = indexes.BooleanField(model_attr='other_read', default=False)
group_read = indexes.BooleanField(model_attr='group_read', default=False)
owner_id = indexes.IntegerField(model_attr='owner__id', null=True)
group_id = indexes.IntegerField(model_attr='group__id', null=True)
def get_model(self):
return models.Article
def index_queryset(self, using=None):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
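# Illustrative query against this index (assumes Haystack is configured and
# the index has been built, e.g. with `manage.py rebuild_index`):
#
#   from haystack.query import SearchQuerySet
#   results = SearchQuerySet().models(models.Article).filter(content='wiki')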
| gpl-3.0 | 5,482,800,041,241,543,000 | 41.807692 | 110 | 0.72956 | false |
michaelneuder/image_quality_analysis | bin/nets/old/conv_net_SSIM.py | 1 | 6635 | #!/usr/bin/env python3
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import numpy as np
np.set_printoptions(threshold=np.nan)
import tensorflow as tf
import time
from PIL import Image as im
def convolve_inner_layers(x, W, b):
y = tf.nn.conv2d(x, W, strides = [1,1,1,1], padding='SAME')
y = tf.nn.bias_add(y, b)
return tf.nn.tanh(y)
def convolve_ouput_layer(x, W, b):
y = tf.nn.conv2d(x, W, strides=[1,1,1,1], padding='SAME')
y = tf.nn.bias_add(y, b)
return y
def conv_net(x, W, b):
conv1 = convolve_inner_layers(x, W['weights1'], b['bias1'])
conv2 = convolve_inner_layers(conv1, W['weights2'], b['bias2'])
conv3 = convolve_inner_layers(conv2, W['weights3'], b['bias3'])
output = convolve_ouput_layer(conv3, W['weights_out'], b['bias_out'])
return output
def get_epoch(x, y, n):
input_size = x.shape[0]
number_batches = int(input_size / n)
extra_examples = input_size % n
batches = {}
batch_indices = np.arange(input_size)
np.random.shuffle(batch_indices)
for i in range(number_batches):
temp_indices = batch_indices[n*i:n*(i+1)]
temp_x = []
temp_y = []
for j in temp_indices:
temp_x.append(x[j])
temp_y.append(y[j])
batches[i] = [np.asarray(temp_x), np.asarray(temp_y)]
if extra_examples != 0:
extra_indices = batch_indices[input_size-extra_examples:input_size]
temp_x = []
temp_y = []
for k in extra_indices:
temp_x.append(x[k])
temp_y.append(y[k])
        # index by number_batches so this also works when the loop above
        # never ran (n > input_size would leave i undefined)
        batches[number_batches] = [np.asarray(temp_x), np.asarray(temp_y)]
return batches
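# Quick sanity check for get_epoch (synthetic shapes, illustrative only):
#
#   x = np.random.rand(120, 96*96*2)
#   y = np.random.rand(120, 96*96)
#   batches = get_epoch(x, y, 50)
#   # batches 0 and 1 each hold 50 shuffled examples,
#   # batch 2 holds the remaining 20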
def main():
# a bit of ascii fun
print(' _ _ _ ')
print(' ___ ___ _ ____ _____ | |_ _| |_(_) ___ _ __ ')
print(' / __/ _ \| \'_ \ \ / / _ \| | | | | __| |/ _ \| \'_ \ ')
print(' | (_| (_) | | | \ V / (_) | | |_| | |_| | (_) | | | |')
print(' \___\___/|_| |_|\_/ \___/|_|\__,_|\__|_|\___/|_| |_|')
print('=======================================================')
print("initializing variables ...")
filter_dim = 11
weights = {
'weights1': tf.Variable((1/(filter_dim*filter_dim*2))*tf.random_normal([filter_dim,filter_dim,2,30])),
'weights2': tf.Variable((1/(30*filter_dim*filter_dim))*tf.random_normal([filter_dim,filter_dim,30,20])),
'weights3': tf.Variable((1/(20*filter_dim*filter_dim))*tf.random_normal([filter_dim,filter_dim,20,10])),
'weights_out': tf.Variable((1/(10*filter_dim*filter_dim))*tf.random_normal([filter_dim,filter_dim,10,1]))
}
biases = {
'bias1': tf.Variable((1/(filter_dim*filter_dim*2))*tf.random_normal([30])),
'bias2': tf.Variable((1/(30*filter_dim*filter_dim))*tf.random_normal([20])),
'bias3': tf.Variable((1/(20*filter_dim*filter_dim))*tf.random_normal([10])),
'bias_out': tf.Variable((1/(10*filter_dim*filter_dim))*tf.random_normal([1]))
}
# tf Graph input
x = tf.placeholder(tf.float32, [None, 96, 96, 2])
y = tf.placeholder(tf.float32, [None, 96, 96, 1])
# data
print("loading data ...")
original_images_train = np.loadtxt('../../../data/sample_data/orig_500.txt')
reconstructed_images_train = np.loadtxt('../../../data/sample_data/recon_500.txt')
comparison_images_train = np.loadtxt('../../../data/sample_data/comp_500.txt')
original_images_test = np.loadtxt('../../../data/sample_data/orig_140.txt')
reconstructed_images_test = np.loadtxt('../../../data/sample_data/recon_140.txt')
comparison_images_test = np.loadtxt('../../../data/sample_data/comp_140.txt')
# get size of training and testing set
train_size = original_images_train.shape[0]
test_size = original_images_test.shape[0]
# reshaping the result data to --- (num pics), 96, 96, 1
comparison_images_train = np.reshape(comparison_images_train, [train_size, 96, 96, 1])
comparison_images_test = np.reshape(comparison_images_test, [test_size, 96, 96, 1])
# zipping data
combined_data_train = np.reshape(np.dstack((original_images_train, reconstructed_images_train)), [train_size,96,96,2])
combined_data_test = np.reshape(np.dstack((original_images_test, reconstructed_images_test)), [test_size,96,96,2])
# paramaters
learning_rate = .0001
epochs = 100
# model
prediction = conv_net(x, weights, biases)
# saving state
saver = tf.train.Saver()
# loss and optimization
cost = tf.reduce_mean(tf.square(tf.subtract(prediction, y)))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
# session
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
global_step = 0
epoch_count = 0
start_time = time.time()
print("starting training ... ")
while epoch_count < epochs:
epoch_time = time.time()
print('-------------------------------------------------------')
print('beginning epoch {} ...'.format(epoch_count))
epoch = get_epoch(combined_data_train, comparison_images_train, 50)
for i in epoch:
x_data_train, y_data_train = np.asarray(epoch[i][0]), np.asarray(epoch[i][1])
sess.run(optimizer, feed_dict={x : x_data_train, y : y_data_train})
loss = sess.run(cost, feed_dict={x : x_data_train, y : y_data_train})
print(" - training global_step {}. current error: {}. ".format(global_step, loss))
global_step+=1
print('epoch {} completed in {} seconds. current error = {}'.format(epoch_count, time.time()-epoch_time, loss))
print('-------------------------------------------------------')
epoch_count+=1
print('optimization finished!')
prediction = np.asarray(sess.run(prediction, feed_dict={x : [combined_data_train[0]]}))
target = np.asarray([comparison_images_test[0]])
print(prediction.shape, target.shape)
with open('post_training.csv', mode = 'w') as write_file:
write_file.write('target, prediction\n')
for i in range(96):
for j in range(96):
write_file.write(str(float(target[0][i][j][0])) + ', ' + str(float(prediction[0][i][j][0])) + '\n')
write_file.close()
if __name__ == '__main__':
main()
| mit | 930,283,126,851,834,000 | 41.532051 | 123 | 0.559759 | false |
derDavidT/sympy | sympy/calculus/euler.py | 54 | 3152 | from sympy import Function, sympify, diff, Eq, S, Symbol, Derivative
from sympy.core.compatibility import (
combinations_with_replacement, iterable, range)
def euler_equations(L, funcs=(), vars=()):
r"""
Find the Euler-Lagrange equations [1]_ for a given Lagrangian.
Parameters
==========
L : Expr
The Lagrangian that should be a function of the functions listed
in the second argument and their derivatives.
For example, in the case of two functions `f(x,y)`, `g(x,y)` and
two independent variables `x`, `y` the Lagrangian would have the form:
.. math:: L\left(f(x,y),g(x,y),\frac{\partial f(x,y)}{\partial x},
\frac{\partial f(x,y)}{\partial y},
\frac{\partial g(x,y)}{\partial x},
\frac{\partial g(x,y)}{\partial y},x,y\right)
In many cases it is not necessary to provide anything, except the
Lagrangian, it will be auto-detected (and an error raised if this
couldn't be done).
funcs : Function or an iterable of Functions
The functions that the Lagrangian depends on. The Euler equations
are differential equations for each of these functions.
vars : Symbol or an iterable of Symbols
The Symbols that are the independent variables of the functions.
Returns
=======
eqns : list of Eq
The list of differential equations, one for each function.
Examples
========
>>> from sympy import Symbol, Function
>>> from sympy.calculus.euler import euler_equations
>>> x = Function('x')
>>> t = Symbol('t')
>>> L = (x(t).diff(t))**2/2 - x(t)**2/2
>>> euler_equations(L, x(t), t)
[Eq(-x(t) - Derivative(x(t), t, t), 0)]
>>> u = Function('u')
>>> x = Symbol('x')
>>> L = (u(t, x).diff(t))**2/2 - (u(t, x).diff(x))**2/2
>>> euler_equations(L, u(t, x), [t, x])
[Eq(-Derivative(u(t, x), t, t) + Derivative(u(t, x), x, x), 0)]
References
==========
.. [1] http://en.wikipedia.org/wiki/Euler%E2%80%93Lagrange_equation
"""
funcs = tuple(funcs) if iterable(funcs) else (funcs,)
if not funcs:
funcs = tuple(L.atoms(Function))
else:
for f in funcs:
if not isinstance(f, Function):
raise TypeError('Function expected, got: %s' % f)
vars = tuple(vars) if iterable(vars) else (vars,)
if not vars:
vars = funcs[0].args
else:
vars = tuple(sympify(var) for var in vars)
if not all(isinstance(v, Symbol) for v in vars):
raise TypeError('Variables are not symbols, got %s' % vars)
for f in funcs:
if not vars == f.args:
raise ValueError("Variables %s don't match args: %s" % (vars, f))
order = max(len(d.variables) for d in L.atoms(Derivative)
if d.expr in funcs)
eqns = []
for f in funcs:
eq = diff(L, f)
for i in range(1, order + 1):
for p in combinations_with_replacement(vars, i):
eq = eq + S.NegativeOne**i*diff(L, diff(f, *p), *p)
eqns.append(Eq(eq))
return eqns
| bsd-3-clause | 5,167,032,256,292,410,000 | 30.838384 | 78 | 0.571066 | false |
KyoungRan/Django_React_ex | Django_React_Workshop-mbrochh/django/myvenv/lib/python3.4/site-packages/django/contrib/gis/db/backends/spatialite/operations.py | 28 | 10604 | """
SQL functions reference lists:
http://www.gaia-gis.it/spatialite-3.0.0-BETA/spatialite-sql-3.0.0.html
https://web.archive.org/web/20130407175746/http://www.gaia-gis.it/gaia-sins/spatialite-sql-4.0.0.html
http://www.gaia-gis.it/gaia-sins/spatialite-sql-4.2.1.html
"""
import re
import sys
from django.contrib.gis.db.backends.base.operations import \
BaseSpatialOperations
from django.contrib.gis.db.backends.spatialite.adapter import SpatiaLiteAdapter
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.contrib.gis.geometry.backend import Geometry
from django.contrib.gis.measure import Distance
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.operations import DatabaseOperations
from django.utils import six
from django.utils.functional import cached_property
class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations):
name = 'spatialite'
spatialite = True
version_regex = re.compile(r'^(?P<major>\d)\.(?P<minor1>\d)\.(?P<minor2>\d+)')
Adapter = SpatiaLiteAdapter
area = 'Area'
centroid = 'Centroid'
collect = 'Collect'
contained = 'MbrWithin'
difference = 'Difference'
distance = 'Distance'
envelope = 'Envelope'
extent = 'Extent'
geojson = 'AsGeoJSON'
gml = 'AsGML'
intersection = 'Intersection'
kml = 'AsKML'
length = 'GLength' # OpenGis defines Length, but this conflicts with an SQLite reserved keyword
makeline = 'MakeLine'
num_geom = 'NumGeometries'
num_points = 'NumPoints'
point_on_surface = 'PointOnSurface'
scale = 'ScaleCoords'
svg = 'AsSVG'
sym_difference = 'SymDifference'
transform = 'Transform'
translate = 'ShiftCoords'
union = 'GUnion' # OpenGis defines Union, but this conflicts with an SQLite reserved keyword
unionagg = 'GUnion'
from_text = 'GeomFromText'
from_wkb = 'GeomFromWKB'
select = 'AsText(%s)'
gis_operators = {
'equals': SpatialOperator(func='Equals'),
'disjoint': SpatialOperator(func='Disjoint'),
'touches': SpatialOperator(func='Touches'),
'crosses': SpatialOperator(func='Crosses'),
'within': SpatialOperator(func='Within'),
'overlaps': SpatialOperator(func='Overlaps'),
'contains': SpatialOperator(func='Contains'),
'intersects': SpatialOperator(func='Intersects'),
'relate': SpatialOperator(func='Relate'),
# Returns true if B's bounding box completely contains A's bounding box.
'contained': SpatialOperator(func='MbrWithin'),
# Returns true if A's bounding box completely contains B's bounding box.
'bbcontains': SpatialOperator(func='MbrContains'),
# Returns true if A's bounding box overlaps B's bounding box.
'bboverlaps': SpatialOperator(func='MbrOverlaps'),
# These are implemented here as synonyms for Equals
'same_as': SpatialOperator(func='Equals'),
'exact': SpatialOperator(func='Equals'),
'distance_gt': SpatialOperator(func='Distance', op='>'),
'distance_gte': SpatialOperator(func='Distance', op='>='),
'distance_lt': SpatialOperator(func='Distance', op='<'),
'distance_lte': SpatialOperator(func='Distance', op='<='),
}
disallowed_aggregates = (aggregates.Extent3D,)
@cached_property
def function_names(self):
return {
'Length': 'ST_Length',
'Reverse': 'ST_Reverse',
'Scale': 'ScaleCoords',
'Translate': 'ST_Translate' if self.spatial_version >= (3, 1, 0) else 'ShiftCoords',
'Union': 'ST_Union',
}
@cached_property
def unsupported_functions(self):
unsupported = {'BoundingCircle', 'ForceRHR', 'IsValid', 'MakeValid', 'MemSize'}
if self.spatial_version < (3, 1, 0):
unsupported.add('SnapToGrid')
if self.spatial_version < (4, 0, 0):
unsupported.update({'Perimeter', 'Reverse'})
elif not self.lwgeom_version():
unsupported.add('GeoHash')
return unsupported
@cached_property
def spatial_version(self):
"""Determine the version of the SpatiaLite library."""
try:
version = self.spatialite_version_tuple()[1:]
except Exception as msg:
new_msg = (
'Cannot determine the SpatiaLite version for the "%s" '
'database (error was "%s"). Was the SpatiaLite initialization '
'SQL loaded on this database?') % (self.connection.settings_dict['NAME'], msg)
six.reraise(ImproperlyConfigured, ImproperlyConfigured(new_msg), sys.exc_info()[2])
if version < (3, 0, 0):
raise ImproperlyConfigured('GeoDjango only supports SpatiaLite versions 3.0.0 and above.')
return version
def convert_extent(self, box, srid):
"""
Convert the polygon data received from SpatiaLite to min/max values.
"""
if box is None:
return None
shell = Geometry(box, srid).shell
xmin, ymin = shell[0][:2]
xmax, ymax = shell[2][:2]
return (xmin, ymin, xmax, ymax)
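    # Sketch of the expected mapping (assumes a working geometry backend):
    #   convert_extent('POLYGON ((0 0, 0 5, 10 5, 10 0, 0 0))', 4326)
    #   -> (0.0, 0.0, 10.0, 5.0)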
def convert_geom(self, wkt, geo_field):
"""
Converts geometry WKT returned from a SpatiaLite aggregate.
"""
if wkt:
return Geometry(wkt, geo_field.srid)
else:
return None
def geo_db_type(self, f):
"""
Returns None because geometry columns are added via the
`AddGeometryColumn` stored procedure on SpatiaLite.
"""
return None
def get_distance(self, f, value, lookup_type, **kwargs):
"""
Returns the distance parameters for the given geometry field,
lookup value, and lookup type. SpatiaLite only supports regular
cartesian-based queries (no spheroid/sphere calculations for point
geometries like PostGIS).
"""
if not value:
return []
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
                raise ValueError('SpatiaLite does not support distance queries on '
                                 'geometry fields with a geodetic coordinate system '
                                 'using Distance objects; use a numeric value of your '
                                 'distance in degrees instead.')
else:
dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection)))
else:
dist_param = value
return [dist_param]
def get_geom_placeholder(self, f, value, compiler):
"""
Provides a proper substitution value for Geometries that are not in the
SRID of the field. Specifically, this routine will substitute in the
Transform() and GeomFromText() function call(s).
"""
def transform_value(value, srid):
return not (value is None or value.srid == srid)
if hasattr(value, 'as_sql'):
if transform_value(value, f.srid):
placeholder = '%s(%%s, %s)' % (self.transform, f.srid)
else:
placeholder = '%s'
# No geometry value used for F expression, substitute in
# the column name instead.
sql, _ = compiler.compile(value)
return placeholder % sql
else:
if transform_value(value, f.srid):
# Adding Transform() to the SQL placeholder.
return '%s(%s(%%s,%s), %s)' % (self.transform, self.from_text, value.srid, f.srid)
else:
return '%s(%%s,%s)' % (self.from_text, f.srid)
def _get_spatialite_func(self, func):
"""
Helper routine for calling SpatiaLite functions and returning
their result.
Any error occurring in this method should be handled by the caller.
"""
cursor = self.connection._cursor()
try:
cursor.execute('SELECT %s' % func)
row = cursor.fetchone()
finally:
cursor.close()
return row[0]
def geos_version(self):
"Returns the version of GEOS used by SpatiaLite as a string."
return self._get_spatialite_func('geos_version()')
def proj4_version(self):
"Returns the version of the PROJ.4 library used by SpatiaLite."
return self._get_spatialite_func('proj4_version()')
def lwgeom_version(self):
"""Return the version of LWGEOM library used by SpatiaLite."""
return self._get_spatialite_func('lwgeom_version()')
def spatialite_version(self):
"Returns the SpatiaLite library version as a string."
return self._get_spatialite_func('spatialite_version()')
def spatialite_version_tuple(self):
"""
Returns the SpatiaLite version as a tuple (version string, major,
minor, subminor).
"""
version = self.spatialite_version()
m = self.version_regex.match(version)
if m:
major = int(m.group('major'))
minor1 = int(m.group('minor1'))
minor2 = int(m.group('minor2'))
else:
raise Exception('Could not parse SpatiaLite version string: %s' % version)
return (version, major, minor1, minor2)
def spatial_aggregate_name(self, agg_name):
"""
Returns the spatial aggregate SQL template and function for the
given Aggregate instance.
"""
agg_name = 'unionagg' if agg_name.lower() == 'union' else agg_name.lower()
return getattr(self, agg_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
from django.contrib.gis.db.backends.spatialite.models import SpatialiteGeometryColumns
return SpatialiteGeometryColumns
def spatial_ref_sys(self):
from django.contrib.gis.db.backends.spatialite.models import SpatialiteSpatialRefSys
return SpatialiteSpatialRefSys
def get_db_converters(self, expression):
converters = super(SpatiaLiteOperations, self).get_db_converters(expression)
if hasattr(expression.output_field, 'geom_type'):
converters.append(self.convert_geometry)
return converters
def convert_geometry(self, value, expression, connection, context):
if value:
value = Geometry(value)
if 'transformed_srid' in context:
value.srid = context['transformed_srid']
return value
| mit | 7,782,957,256,435,374,000 | 38.129151 | 102 | 0.621652 | false |
skiselev/upm | examples/python/relay.py | 7 | 1896 | from __future__ import print_function
# Author: Sarah Knepper <[email protected]>
# Copyright (c) 2015 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import time
from upm import pyupm_grove as grove
def main():
# Create the relay switch object using GPIO pin 0
relay = grove.Relay(0)
# Close and then open the relay switch 3 times,
# waiting one second each time. The LED on the relay switch
# will light up when the switch is on (closed).
# The switch will also make a noise between transitions.
    for i in range(0, 3):
relay.on()
if relay.isOn():
print(relay.name(), 'is on')
time.sleep(1)
relay.off()
if relay.isOff():
print(relay.name(), 'is off')
time.sleep(1)
# Delete the relay switch object
del relay
if __name__ == '__main__':
main()
| mit | -1,082,877,575,210,683,600 | 37.693878 | 72 | 0.707806 | false |
richardcs/ansible | lib/ansible/plugins/lookup/mongodb.py | 84 | 8872 | # (c) 2016, Marcos Diez <[email protected]>
# https://github.com/marcosdiez/
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
from ansible.module_utils.six import string_types, integer_types
__metaclass__ = type
DOCUMENTATION = '''
author: 'Marcos Diez <marcos (at) unitron.com.br>'
lookup: mongodb
version_added: "2.3"
short_description: lookup info from MongoDB
description:
- 'The ``MongoDB`` lookup runs the *find()* command on a given *collection* on a given *MongoDB* server.'
- 'The result is a list of jsons, so slightly different from what PyMongo returns. In particular, *timestamps* are converted to epoch integers.'
options:
    connection_string:
description:
- Can be any valid MongoDB connection string, supporting authentication, replicasets, etc.
- "More info at U(https://docs.mongodb.org/manual/reference/connection-string/)"
default: "mongodb://localhost/"
database:
description:
- Name of the database which the query will be made
required: True
collection:
description:
- Name of the collection which the query will be made
required: True
filter:
description:
            - Query criteria used to filter the returned documents
type: 'dict'
default: '{}'
projection:
description:
- Fields you want returned
type: dict
default: "{}"
skip:
description:
- How many results should be skipped
type: integer
limit:
description:
- How many results should be shown
type: integer
sort:
description:
            - Sorting rules. Please note the constants are replaced by strings.
type: list
default: "[]"
notes:
- "Please check https://api.mongodb.org/python/current/api/pymongo/collection.html?highlight=find#pymongo.collection.Collection.find for more details."
requirements:
- pymongo >= 2.4 (python library)
'''
EXAMPLES = '''
- hosts: all
gather_facts: false
vars:
mongodb_parameters:
#mandatory parameters
database: 'local'
#optional
collection: "startup_log"
connection_string: "mongodb://localhost/"
extra_connection_parameters: { "ssl" : True , "ssl_certfile": /etc/self_signed_certificate.pem" }
#optional query parameters, we accept any parameter from the normal mongodb query.
filter: { "hostname": "batman" }
projection: { "pid": True , "_id" : False , "hostname" : True }
skip: 0
limit: 1
sort: [ [ "startTime" , "ASCENDING" ] , [ "age", "DESCENDING" ] ]
tasks:
- debug: msg="Mongo has already started with the following PID [{{ item.pid }}]"
with_mongodb: "{{mongodb_parameters}}"
'''
import datetime
try:
from pymongo import ASCENDING, DESCENDING
from pymongo.errors import ConnectionFailure
from pymongo import MongoClient
except ImportError:
try: # for older PyMongo 2.2
from pymongo import Connection as MongoClient
except ImportError:
pymongo_found = False
else:
pymongo_found = True
else:
pymongo_found = True
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def _fix_sort_parameter(self, sort_parameter):
if sort_parameter is None:
return sort_parameter
if not isinstance(sort_parameter, list):
raise AnsibleError(u"Error. Sort parameters must be a list, not [ {0} ]".format(sort_parameter))
for item in sort_parameter:
self._convert_sort_string_to_constant(item)
return sort_parameter
def _convert_sort_string_to_constant(self, item):
original_sort_order = item[1]
sort_order = original_sort_order.upper()
if sort_order == u"ASCENDING":
item[1] = ASCENDING
elif sort_order == u"DESCENDING":
item[1] = DESCENDING
# else the user knows what s/he is doing and we won't predict. PyMongo will return an error if necessary
def convert_mongo_result_to_valid_json(self, result):
if result is None:
return result
if isinstance(result, integer_types + (float, bool)):
return result
if isinstance(result, string_types):
return result
elif isinstance(result, list):
new_list = []
for elem in result:
new_list.append(self.convert_mongo_result_to_valid_json(elem))
return new_list
elif isinstance(result, dict):
new_dict = {}
for key in result.keys():
value = result[key] # python2 and 3 compatible....
new_dict[key] = self.convert_mongo_result_to_valid_json(value)
return new_dict
elif isinstance(result, datetime.datetime):
# epoch
            return (result - datetime.datetime(1970, 1, 1)).total_seconds()
else:
# failsafe
return u"{0}".format(result)
def run(self, terms, variables, **kwargs):
ret = []
for term in terms:
u'''
Makes a MongoDB query and returns the output as a valid list of json.
Timestamps are converted to epoch integers/longs.
Here is a sample playbook that uses it:
-------------------------------------------------------------------------------
- hosts: all
gather_facts: false
vars:
mongodb_parameters:
#optional parameter, default = "mongodb://localhost/"
# connection_string: "mongodb://localhost/"
#mandatory parameters
database: 'local'
collection: "startup_log"
#optional query parameters
#we accept any parameter from the normal mongodb query.
# the official documentation is here
# https://api.mongodb.org/python/current/api/pymongo/collection.html?highlight=find#pymongo.collection.Collection.find
# filter: { "hostname": "batman" }
# projection: { "pid": True , "_id" : False , "hostname" : True }
# skip: 0
# limit: 1
# sort: [ [ "startTime" , "ASCENDING" ] , [ "age", "DESCENDING" ] ]
# extra_connection_parameters = { }
# dictionary with extra parameters like ssl, ssl_keyfile, maxPoolSize etc...
# the full list is available here. It varies from PyMongo version
# https://api.mongodb.org/python/current/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient
tasks:
- debug: msg="Mongo has already started with the following PID [{{ item.pid }}] - full_data {{ item }} "
with_items:
- "{{ lookup('mongodb', mongodb_parameters) }}"
-------------------------------------------------------------------------------
'''
connection_string = term.get(u'connection_string', u"mongodb://localhost")
database = term[u"database"]
collection = term[u'collection']
extra_connection_parameters = term.get(u'extra_connection_parameters', {})
if u"extra_connection_parameters" in term:
del term[u"extra_connection_parameters"]
if u"connection_string" in term:
del term[u"connection_string"]
del term[u"database"]
del term[u"collection"]
if u"sort" in term:
term[u"sort"] = self._fix_sort_parameter(term[u"sort"])
# all other parameters are sent to mongo, so we are future and past proof
try:
client = MongoClient(connection_string, **extra_connection_parameters)
results = client[database][collection].find(**term)
for result in results:
result = self.convert_mongo_result_to_valid_json(result)
ret.append(result)
except ConnectionFailure as e:
raise AnsibleError(u'unable to connect to database: %s' % str(e))
return ret
| gpl-3.0 | -7,530,318,172,670,179,000 | 36.277311 | 159 | 0.599865 | false |
pablorecio/resistencia-1812 | resistencia/gui/round_results.py | 1 | 6387 | # -*- coding: utf-8 -*-
###############################################################################
# This file is part of Resistencia Cadiz 1812. #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
# Copyright (C) 2010, Pablo Recio Quijano, <[email protected]> #
###############################################################################
import gtk
from resistencia import xdg
from resistencia.nls import gettext as _
def _draw_string(string, color):
return '<span foreground="' + color + '"><b>' + string + '</b></span>'
class roundResults:
def add_column(self, list_view, title, columnId):
column = gtk.TreeViewColumn(title, gtk.CellRendererText(), markup=columnId)
list_view.append_column(column)
def fill_classification(self):
i = 1
top = int(len(self.classifications) / 2)
for e in self.classifications:
if e[0] == 'aux_ghost_team':
top = top - 1
color = '#0C0C9D'
print self.classifications
for e in self.classifications:
name = e[0]
if not name == 'aux_ghost_team':
if self.show_top_teams and (i - 1) < top:
name = _draw_string(name, color)
self.list_store_classifications.append((i, name, e[1]))
i = i + 1
def fill_results(self):
for e in self.results:
teamA = e[0][0].replace('aux_ghost_team', _('Rests'))
teamB = e[0][1].replace('aux_ghost_team', _('Rests'))
win_color = '#0C0C9D'
draw_color = '#5DEA5D'
if e[1] == 1:
teamA = _draw_string(teamA, win_color)
elif e[1] == -1:
teamB = _draw_string(teamB, win_color)
else: #draw
teamA = _draw_string(teamA, draw_color)
teamB = _draw_string(teamB, draw_color)
self.list_store_results.append((teamA, teamB))
def __init__(self, classification, results, round, rounds,
show_classifications=True, show_top_teams=False): #add parent
builder = gtk.Builder()
builder.add_from_file(xdg.get_data_path('glade/results.glade'))
self.classifications = classification
self.results = results
self.round = round
self.rounds = rounds
self.show_top_teams = show_top_teams
self.result_dialog = builder.get_object('dlg_results')
title = self.result_dialog.get_title() + ' ' + str(round) + '/' + str(rounds)
self.result_dialog.set_title(title)
self.confirmation_dialog = builder.get_object('dlg_confirmation_close')
self.confirmation_dialog.connect('response', lambda d, r: d.hide())
self.confirmation_dialog.set_transient_for(self.result_dialog)
self.finalround_dialog = builder.get_object('dlg_finalround')
self.finalround_dialog.connect('response', lambda d, r: d.hide())
self.finalround_dialog.set_transient_for(self.result_dialog)
self.list_view_classifications = builder.get_object('treeview_classification')
self.list_view_results = builder.get_object('treeview_results')
if show_classifications:
self.cPosition = 0
self.cTeamName = 1
self.cPuntuations = 2
self.sPosition = 'Pos'
self.sTeamName = _('Team name')
self.sPuntuations = 'Punt'
self.add_column(self.list_view_classifications,
self.sPosition, self.cPosition)
self.add_column(self.list_view_classifications,
self.sTeamName, self.cTeamName)
self.add_column(self.list_view_classifications,
self.sPuntuations, self.cPuntuations)
self.list_store_classifications = builder.get_object('list_classification')
if show_classifications:
self.fill_classification()
else:
builder.get_object('hbox1').remove(builder.get_object('frame_classifications'))
self.cTeamA = 0
self.cTeamB = 1
self.sTeamA = _('Team A')
self.sTeamB = _('Team B')
self.add_column(self.list_view_results, self.sTeamA, self.cTeamA)
self.add_column(self.list_view_results, self.sTeamB, self.cTeamB)
self.list_store_results = builder.get_object('list_results')
self.fill_results()
self.end_contest = False
builder.connect_signals(self)
def on_dlg_results_show(self, data=None):
print 'on_dlg_results_show'
if self.round == self.rounds:
self.finalround_dialog.run()
def on_btn_results_cancel_clicked(self, widget):
self.confirmation_dialog.run()
def on_btn_results_next_clicked(self, widget):
self.result_dialog.hide()
def on_dlg_results_close(self, widget, data=None):
self.result_dialog.destroy()
def on_btn_confirmation_apply_clicked(self, widget, data=None):
self.confirmation_dialog.destroy()
self.result_dialog.destroy()
self.end_contest = True
def on_btn_confirmation_cancel_clicked(self, widget, data=None):
self.confirmation_dialog.hide()
self.result_dialog.run()
| gpl-3.0 | -3,368,089,194,443,015,000 | 41.865772 | 91 | 0.546109 | false |
rysson/filmkodi | plugin.video.mrknow/lib/parser2.py | 2 | 28077 | # -*- coding: utf-8 -*-
import common
import sys, os, traceback
import time
import random
import re
import urllib
import string
from string import lower
from entities.CList import CList
from entities.CItemInfo import CItemInfo
from entities.CListItem import CListItem
from entities.CRuleItem import CRuleItem
import customReplacements as cr
import customConversions as cc
from utils import decryptionUtils as crypt
from utils import datetimeUtils as dt
from utils import rowbalance as rb
from utils.fileUtils import findInSubdirectory, getFileContent, getFileExtension
from utils.scrapingUtils import findVideoFrameLink, findContentRefreshLink, findRTMP, findJS, findPHP, getHostName, findEmbedPHPLink
from common import getHTML
import requests
def mydump(obj):
'''return a printable representation of an object for debugging'''
newobj=obj
    if '__dict__' in dir(obj):
        newobj = obj.__dict__.copy()  # copy so dumping never mutates the object
if ' object at ' in unicode(obj) and not newobj.has_key('__type__'):
newobj['__type__']=unicode(obj)
for attr in newobj:
newobj[attr]=mydump(newobj[attr])
return newobj
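# Illustrative call (object is hypothetical; for debugging only):
#
#   item = CListItem()
#   common.log(str(mydump(item)))   # logs the item's attributes as a dict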
class ParsingResult(object):
class Code:
SUCCESS = 0
CFGFILE_NOT_FOUND = 1
CFGSYNTAX_INVALID = 2
WEBREQUEST_FAILED = 3
def __init__(self, code, itemsList):
self.code = code
self.list = itemsList
self.message = None
class Parser2(object):
"""
returns a list of items
"""
def parse(self, lItem):
url = lItem['url']
cfg = lItem['cfg']
ext = getFileExtension(url)
successfullyScraped = True
tmpList = None
if lItem['catcher']:
catcher = lItem['catcher']
cfg = os.path.join(common.Paths.catchersDir, '__' + catcher + '.cfg')
tmpList = self.__loadLocal(cfg, lItem)
if tmpList and len(tmpList.rules) > 0:
successfullyScraped = self.__loadRemote(tmpList, lItem)
else:
if ext == 'cfg':
tmpList = self.__loadLocal(url, lItem)
if tmpList and tmpList.start != '' and len(tmpList.rules) > 0:
lItem['url'] = tmpList.start
successfullyScraped = self.__loadRemote(tmpList, lItem)
elif cfg:
tmpList = self.__loadLocal(cfg, lItem)
if tmpList and len(tmpList.rules) > 0:
successfullyScraped = self.__loadRemote(tmpList, lItem)
# autoselect
if tmpList and tmpList.skill.find('autoselect') != -1 and len(tmpList.items) == 1:
m = tmpList.items[0]
m_type = m['type']
if m_type == 'rss':
common.log('Autoselect - ' + m['title'])
lItem = m
tmpList = self.parse(lItem).list
if not tmpList:
return ParsingResult(ParsingResult.Code.CFGSYNTAX_INVALID, None)
if tmpList and successfullyScraped == False:
return ParsingResult(ParsingResult.Code.WEBREQUEST_FAILED, tmpList)
# Remove duplicates
if tmpList.skill.find('allowDuplicates') == -1:
urls = []
for i in range(len(tmpList.items)-1,-1,-1):
item = tmpList.items[i]
tmpUrl = item['url']
tmpCfg = item['cfg']
if not tmpCfg:
tmpCfg = ''
if not urls.__contains__(tmpUrl + '|' + tmpCfg):
urls.append(tmpUrl + '|' + tmpCfg)
else:
tmpList.items.remove(item)
return ParsingResult(ParsingResult.Code.SUCCESS, tmpList)
"""
loads cfg, creates list and sets up rules for scraping
"""
def __loadLocal(self, filename, lItem = None):
params = []
#get Parameters
if filename.find('@') != -1:
params = filename.split('@')
filename = params.pop(0)
# get cfg file
cfg = filename
if not os.path.exists(cfg):
cfg = os.path.join(common.Paths.modulesDir, filename)
if not os.path.exists(cfg):
tmpPath = os.path.dirname(os.path.join(common.Paths.modulesDir, lItem["definedIn"]))
cfg = os.path.join(tmpPath ,filename)
if not os.path.exists(cfg):
srchFilename = filename
if filename.find('/') > -1:
srchFilename = srchFilename.split('/')[1]
try:
cfg = findInSubdirectory(srchFilename, common.Paths.modulesDir)
except:
try:
cfg = findInSubdirectory(srchFilename, common.Paths.favouritesFolder)
except:
try:
cfg = findInSubdirectory(srchFilename, common.Paths.customModulesDir)
except:
common.log('File not found: ' + srchFilename)
return None
#load file and apply parameters
data = getFileContent(cfg)
data = cr.CustomReplacements().replace(os.path.dirname(cfg), data, lItem, params)
#log
msg = 'Local file ' + filename + ' opened'
if len(params) > 0:
msg += ' with Parameter(s): '
msg += ",".join(params)
common.log(msg)
outputList = self.__parseCfg(filename, data, lItem)
return outputList
"""
scrape items according to rules and add them to the list
"""
def __loadRemote(self, inputList, lItem):
try:
inputList.curr_url = lItem['url']
count = 0
i = 1
maxits = 2 # 1 optimistic + 1 demystified
ignoreCache = False
demystify = False
back = ''
startUrl = inputList.curr_url
#print inputList, lItem
while count == 0 and i <= maxits:
if i > 1:
ignoreCache = True
demystify = True
# Trivial: url is from known streamer
if back:
lItem['referer'] = back
items = self.__parseHtml(inputList.curr_url, '"' + inputList.curr_url + '"', inputList.rules, inputList.skill, inputList.cfg, lItem)
count = len(items)
# try to find items in html source code
if count == 0:
referer = ''
if lItem['referer']:
referer = lItem['referer']
data = common.getHTML(inputList.curr_url, None, referer, False, False, ignoreCache, demystify)
if data == '':
return False
msg = 'Remote URL ' + inputList.curr_url + ' opened'
if demystify:
msg += ' (demystified)'
common.log(msg)
if inputList.section != '':
section = inputList.section
data = self.__getSection(data, section)
if lItem['section']:
section = lItem['section']
data = self.__getSection(data, section)
print("-----------",inputList.curr_url, inputList.skill, inputList.cfg, lItem)
items = self.__parseHtml(inputList.curr_url, data, inputList.rules, inputList.skill, inputList.cfg, lItem)
count = len(items)
common.log(' -> ' + str(count) + ' item(s) found')
# find rtmp stream
#common.log('Find rtmp stream')
if count == 0:
item = self.__findRTMP(data, startUrl, lItem)
if item:
items = []
items.append(item)
count = 1
# find embedding javascripts
#common.log('Find embedding javascripts')
if count == 0:
item = findJS(data)
if item:
firstJS = item[0]
streamId = firstJS[0]
jsUrl = firstJS[1]
if not jsUrl.startswith('http://'):
jsUrl = urllib.basejoin(startUrl,jsUrl)
streamerName = getHostName(jsUrl)
jsSource = getHTML(jsUrl, None, startUrl)
phpUrl = findPHP(jsSource, streamId)
if phpUrl:
data = getHTML(phpUrl, None, startUrl)
item = self.__findRTMP(data, phpUrl, lItem)
if item:
if streamerName:
item['title'] = item['title'].replace('RTMP', streamerName)
items = []
items.append(item)
count = 1
else:
red = phpUrl
try:
if (not red.startswith('http')): red='http:'+red
except: pass
common.log(' -> Redirect: ' + red)
if back == red:
break
back = inputList.curr_url
inputList.curr_url = red
common.log(str(len(inputList.items)) + ' items ' + inputList.cfg + ' -> ' + red)
startUrl = red
continue
# find redirects
#common.log('find redirects')
if count == 0:
red = self.__findRedirect(startUrl, inputList.curr_url)
if startUrl == red:
common.log(' -> No redirect found')
else:
try:
if (not red.startswith('http')): red = 'http:' + red
except:
pass
common.log(' -> Redirect: ' + red)
if back == red:
break
back = inputList.curr_url
inputList.curr_url = red
common.log(str(len(inputList.items)) + ' items ' + inputList.cfg + ' -> ' + red)
startUrl = red
i = 0
i += 1
if count != 0:
inputList.items = inputList.items + items
except:
traceback.print_exc(file = sys.stdout)
return False
return True
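    # Fallback order sketched above: parse the URL directly when it comes
    # from a known streamer, then scrape the fetched HTML, then look for an
    # RTMP stream, then for embedding javascripts, and finally follow
    # redirects, retrying once with demystified (deobfuscated) source
    # before giving up.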
def __findRTMP(self, data, pageUrl, lItem):
rtmp = findRTMP(pageUrl, data)
if rtmp:
item = CListItem()
item['title'] = 'RTMP* - ' + rtmp[1]
item['type'] = 'video'
item['url'] = rtmp[0] + ' playPath=' + rtmp[1] + ' swfUrl=' + rtmp[2] +' swfVfy=1 live=true pageUrl=' + pageUrl
item.merge(lItem)
return item
return None
def __getSection(self, data, section):
p = re.compile(section, re.IGNORECASE + re.DOTALL + re.UNICODE)
m = p.search(data)
if m:
return m.group(0)
else:
common.log(' -> Section could not be found:' + section)
return data
def __findRedirect(self, page, referer='', demystify=False):
data = common.getHTML(page, None, referer=referer, xml=False, mobile=False, demystify=demystify)
if findContentRefreshLink(page, data):
return findContentRefreshLink(page, data)
elif findVideoFrameLink(page, data):
return findVideoFrameLink(page, data)
elif findEmbedPHPLink(data):
return findEmbedPHPLink(data)
if not demystify:
return self.__findRedirect(page, referer, True)
return page
def __parseCfg(self, cfgFile, data, lItem):
tmpList = CList()
data = data.replace('\r\n', '\n').split('\n')
items = []
tmp = None
hasOwnCfg = False
for m in data:
if m and m[0] != '#':
index = m.find('=')
if index != -1:
                    key = m[:index].lower().strip()
value = m[index+1:]
index = value.find('|')
if value[:index] == 'sports.devil.locale':
value = common.translate(int(value[index+1:]))
elif value[:index] == 'sports.devil.image':
value = os.path.join(common.Paths.imgDir, value[index+1:])
if key == 'start':
tmpList.start = value
elif key == 'section':
tmpList.section = value
elif key == 'sort':
tmpList.sort = value
elif key == 'skill':
tmpList.skill = value
elif key == 'catcher':
tmpList.catcher = value
elif key == 'item_infos':
rule_tmp = CRuleItem()
hasOwnCfg = False
rule_tmp.infos = value
elif key == 'item_order':
rule_tmp.order = value
elif key == 'item_skill':
rule_tmp.skill = value
elif key == 'item_curr':
rule_tmp.curr = value
elif key == 'item_precheck':
rule_tmp.precheck = value
elif key.startswith('item_info'):
tmpkey = key[len('item_info'):]
if tmpkey == '_name':
info_tmp = CItemInfo()
info_tmp.name = value
if value == 'cfg':
hasOwnCfg = True
elif tmpkey == '_from':
info_tmp.src = value
elif tmpkey == '':
info_tmp.rule = value
elif tmpkey == '_default':
info_tmp.default = value
elif tmpkey == '_convert':
info_tmp.convert.append(value)
elif tmpkey == '_build':
info_tmp.build = value
rule_tmp.info_list.append(info_tmp)
elif key == 'item_url_build':
rule_tmp.url_build = value
if tmpList.catcher != '':
refInf = CItemInfo()
refInf.name = 'referer'
refInf.build = value
rule_tmp.info_list.append(refInf)
if not hasOwnCfg:
refInf = CItemInfo()
refInf.name = 'catcher'
refInf.build = tmpList.catcher
rule_tmp.info_list.append(refInf)
tmpList.rules.append(rule_tmp)
# static menu items (without regex)
elif key == 'title':
tmp = CListItem()
tmp['title'] = value
if tmpList.skill.find('videoTitle') > -1:
tmp['videoTitle'] = value
elif key == 'url':
tmp['url'] = value
if lItem:
tmp.merge(lItem)
if tmpList.catcher != '':
tmp['referer'] = value
if not hasOwnCfg:
tmp['catcher'] = tmpList.catcher
tmp['definedIn'] = cfgFile
items.append(tmp)
tmp = None
elif tmp != None:
if key == 'cfg':
hasOwnCfg = True
tmp[key] = value
tmpList.items = items
tmpList.cfg = cfgFile
return tmpList
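    # Hedged sketch of a cfg fragment that __parseCfg understands (the key
    # names come from the branches above; the values are invented):
    #
    #   title=Example Channel
    #   url=http://example.com/streams
    #   item_infos=<a href="(.+?)">(.+?)</a>
    #   item_order=url|title
    #   item_url_build=http://example.com%s
    #   item_skill=append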
def __parseHtml(self, url, data, rules, skills, definedIn, lItem):
#common.log('_parseHtml called' + url)
items = []
for item_rule in rules:
#common.log('rule: ' + item_rule.infos)
if not hasattr(item_rule, 'precheck') or (item_rule.precheck in data):
revid = re.compile(item_rule.infos, re.IGNORECASE + re.DOTALL + re.MULTILINE + re.UNICODE)
for reinfos in revid.findall(data):
tmp = CListItem()
if lItem['referer']:
tmp['referer'] = lItem['referer']
if item_rule.order.find('|') != -1:
infos_names = item_rule.order.split('|')
infos_values = list(reinfos)
i = 0
for name in infos_names:
tmp[name] = infos_values[i]
i = i+1
else:
tmp[item_rule.order] = reinfos
for info in item_rule.info_list:
info_value = tmp[info.name]
if info_value:
if info.build.find('%s') != -1:
tmpVal = info.build % info_value
tmp[info.name] = tmpVal
continue
if info.build.find('%s') != -1:
if info.src.__contains__('+'):
tmpArr = info.src.split('+')
src = ''
for t in tmpArr:
t = t.strip()
if t.find('\'') != -1:
src = src + t.strip('\'')
else:
src = src + (tmp[t] or '')
elif info.src.__contains__('||'):
variables = info.src.split('||')
src = firstNonEmpty(tmp, variables)
else:
src = tmp[info.src]
if src and info.convert != []:
tmp['referer'] = url
src = self.__parseCommands(tmp, src, info.convert)
if isinstance(src, dict):
for dKey in src:
tmp[dKey] = src[dKey]
src = src.values()[0]
info_value = info.build % (src)
else:
info_value = info.build
tmp[info.name] = info_value
if tmp['url']:
tmp['url'] = item_rule.url_build % (tmp['url'])
else:
tmp['url'] = url
tmp.merge(lItem)
if item_rule.skill.find('append') != -1:
tmp['url'] = url + tmp['url']
if item_rule.skill.find('space') != -1:
tmp['title'] = ' %s ' % tmp['title'].strip()
if skills.find('videoTitle') > -1:
tmp['videoTitle'] = tmp['title']
tmp['definedIn'] = definedIn
items.append(tmp)
return items
def __parseCommands(self, item, src, convCommands):
#common.log('_parseCommands called')
#common.log('_parseCommands called %s | %s | %s' % (item,src, convCommands))
# helping function
def parseCommand(txt):
command = {"command": txt, "params": ""}
if txt.find("(") > -1:
command["command"] = txt[0:txt.find("(")]
command["params"] = txt[len(command["command"]) + 1:-1]
return command
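        # e.g. parseCommand("replace('a','b')") returns
        #      {'command': 'replace', 'params': "'a','b'"},
        # while parseCommand('time') returns {'command': 'time', 'params': ''}.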
for convCommand in convCommands:
pComm = parseCommand(convCommand)
command = pComm["command"]
params = pComm["params"]
if params.find('@REFERER@'):
referer = item['referer']
if not referer:
referer = ''
params = params.replace('@REFERER@', referer)
if command == 'convDate':
src = cc.convDate(params, src)
elif command =='currenturl':
print("--------------curenturl ------------------------")
src= getFileContent(os.path.join(common.Paths.cacheDir, 'lasturl'))
print("--------------curenturl ------------------------",src)
elif command =='iklub':
                common.log('--------------iklub ------------------------')
common.log('src: %s' % src)
src = cc.decodeIklub(src)
common.log('src: %s' % src)
#common.log('--------------ikulb ------------------------')
elif command =='decodemrknow2':
common.log('--------------decodemrknow2 ------------------------')
#common.log('src: %s' % src)
src = cc.decodeMrknow2(src)
#common.log('src: %s' % src)
elif command =='decodemrknow3':
common.log('--------------decodemrknow3 ------------------------')
common.log('src: %s' % src)
src = cc.decodeMrknow3(src)
#common.log('src: %s' % src)
elif command == 'convTimestamp':
src = cc.convTimestamp(params, src)
elif command == 'select':
src = cc.select(params, src)
if not src:
continue
elif command == 'unicode_escape':
src = src.decode('unicode-escape')
elif command == 'replaceFromDict':
dictName = str(params.strip('\''))
path = os.path.join(common.Paths.dictsDir, dictName + '.txt')
if not (os.path.exists(path)):
common.log('Dictionary file not found: ' + path)
continue
src = cc.replaceFromDict(path, src)
elif command == 'time':
src = time.time()
elif command == 'timediff':
src = dt.timediff(src,params.strip('\''))
elif command == 'offset':
src = cc.offset(params, src)
elif command == 'getSource':
src = cc.getSource(params, src)
elif command == 'quote':
try:
src = urllib.quote(params.strip("'").replace('%s', src),'')
except:
cleanParams = params.strip("'")
cleanParams = cleanParams.replace("%s",src)
src = urllib.quote(cleanParams.encode('utf-8'),'')
elif command == 'unquote':
src = urllib.unquote(params.strip("'").replace('%s', src))
elif command == 'parseText':
src = cc.parseText(item, params, src)
elif command == 'getInfo':
src = cc.getInfo(item, params, src)
elif command == 'getXML':
src = cc.getInfo(item, params, src, xml=True)
elif command == 'getMobile':
src = cc.getInfo(item, params, src, mobile=True)
elif command == 'decodeBase64':
src = cc.decodeBase64(src)
elif command == 'decodeRawUnicode':
src = cc.decodeRawUnicode(src)
elif command == 'resolve':
src = cc.resolve(src)
elif command == 'decodeXppod':
src = cc.decodeXppod(src)
elif command == 'decodeXppodHLS':
src = cc.decodeXppod_hls(src)
elif command == 'decodeMrknow1':
src = cc.decodeMrknow1(src)
elif command == 'replace':
src = cc.replace(params, src)
elif command == 'replaceRegex':
src = cc.replaceRegex(params, src)
elif command == 'ifEmpty':
src = cc.ifEmpty(item, params, src)
elif command == 'isEqual':
src = cc.isEqual(item, params, src)
elif command == 'ifFileExists':
src = cc.ifFileExists(item, params, src)
elif command == 'ifExists':
src = cc.ifExists(item, params, src)
elif command == 'encryptJimey':
src = crypt.encryptJimey(params.strip("'").replace('%s', src))
elif command == 'gAesDec':
src = crypt.gAesDec(src,item.infos[params])
elif command == 'aesDec':
src = crypt.aesDec(src,item.infos[params])
elif command == 'getCookies':
src = cc.getCookies(params, src)
elif command == 'destreamer':
src = crypt.destreamer(params.strip("'").replace('%s', src))
elif command == 'unixTimestamp':
src = dt.getUnixTimestamp()
elif command == 'rowbalance':
src = rb.get()
elif command == 'urlMerge':
src = cc.urlMerge(params, src)
elif command == 'translate':
try:
src = common.translate(int(src))
except:
pass
elif command == 'camelcase':
src = string.capwords(string.capwords(src, '-'))
elif command == 'lowercase':
src = string.lower(src)
elif command == 'reverse':
src = src[::-1]
elif command == 'demystify':
print 'demystify'
src = crypt.doDemystify(src)
print 'after demystify',src
elif command == 'random':
paramArr = params.split(',')
minimum = int(paramArr[0])
maximum = int(paramArr[1])
src = str(random.randrange(minimum,maximum))
elif command == 'debug':
common.log('--------------debug ------------------------')
common.log('Debug from cfg file: ' + src)
elif command == 'divide':
paramArr = params.split(',')
a = paramArr[0].strip().strip("'").replace('%s', src)
a = resolveVariable(a, item)
b = paramArr[1].strip().strip("'").replace('%s', src)
b = resolveVariable(b, item)
if not a or not b:
continue
a = int(a)
b = int(b)
try:
src = str(a/b)
except:
pass
return src
def resolveVariable(varStr, item):
if varStr.startswith('@') and varStr.endswith('@'):
return item.getInfo(varStr.strip('@'))
return varStr
def firstNonEmpty(tmp, variables):
for v in variables:
vClean = v.strip()
if vClean.find("'") != -1:
vClean = vClean.strip("'")
else:
vClean = tmp.getInfo(vClean)
if vClean != '':
return vClean
return ''
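# Hedged usage sketch for the two helpers above (the item object and its
# values are assumptions; any CListItem-like object exposing getInfo() works):
#
#   class _FakeItem(object):
#       def getInfo(self, key):
#           return {'title': 'Evening Stream'}.get(key, '')
#
#   firstNonEmpty(_FakeItem(), ["title", "'fallback'"])  # -> 'Evening Stream'
#   resolveVariable('@title@', _FakeItem())              # -> 'Evening Stream'
#   resolveVariable('plain', _FakeItem())                # -> 'plain'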
| apache-2.0 | -2,253,433,605,346,059,300 | 35.275194 | 148 | 0.435766 | false |
samithaj/headphones | lib/pytz/__init__.py | 61 | 34011 | '''
datetime.tzinfo timezone definitions generated from the
Olson timezone database:
ftp://elsie.nci.nih.gov/pub/tz*.tar.gz
See the datetime section of the Python Library Reference for information
on how to use these modules.
'''
# The Olson database is updated several times a year.
OLSON_VERSION = '2014j'
VERSION = '2014.10' # Switching to pip compatible version numbering.
__version__ = VERSION
OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling
__all__ = [
'timezone', 'utc', 'country_timezones', 'country_names',
'AmbiguousTimeError', 'InvalidTimeError',
'NonExistentTimeError', 'UnknownTimeZoneError',
'all_timezones', 'all_timezones_set',
'common_timezones', 'common_timezones_set',
]
import sys, datetime, os.path, gettext
try:
from pkg_resources import resource_stream
except ImportError:
resource_stream = None
from pytz.exceptions import AmbiguousTimeError
from pytz.exceptions import InvalidTimeError
from pytz.exceptions import NonExistentTimeError
from pytz.exceptions import UnknownTimeZoneError
from pytz.lazy import LazyDict, LazyList, LazySet
from pytz.tzinfo import unpickler
from pytz.tzfile import build_tzinfo, _byte_string
try:
unicode
except NameError: # Python 3.x
# Python 3.x doesn't have unicode(), making writing code
# for Python 2.3 and Python 3.x a pain.
unicode = str
def ascii(s):
r"""
>>> ascii('Hello')
'Hello'
>>> ascii('\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
UnicodeEncodeError: ...
"""
s.encode('US-ASCII') # Raise an exception if not ASCII
return s # But return the original string - not a byte string.
else: # Python 2.x
def ascii(s):
r"""
>>> ascii('Hello')
'Hello'
>>> ascii(u'Hello')
'Hello'
>>> ascii(u'\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
UnicodeEncodeError: ...
"""
return s.encode('US-ASCII')
def open_resource(name):
"""Open a resource from the zoneinfo subdir for reading.
Uses the pkg_resources module if available and no standard file
found at the calculated location.
"""
name_parts = name.lstrip('/').split('/')
for part in name_parts:
if part == os.path.pardir or os.path.sep in part:
raise ValueError('Bad path segment: %r' % part)
filename = os.path.join(os.path.dirname(__file__),
'zoneinfo', *name_parts)
if not os.path.exists(filename) and resource_stream is not None:
# http://bugs.launchpad.net/bugs/383171 - we avoid using this
# unless absolutely necessary to help when a broken version of
# pkg_resources is installed.
return resource_stream(__name__, 'zoneinfo/' + name)
return open(filename, 'rb')
def resource_exists(name):
"""Return true if the given resource exists"""
try:
open_resource(name).close()
return True
except IOError:
return False
# Enable this when we get some translations?
# We want an i18n API that is useful to programs using Python's gettext
# module, as well as the Zope3 i18n package. Perhaps we should just provide
# the POT file and translations, and leave it up to callers to make use
# of them.
#
# t = gettext.translation(
# 'pytz', os.path.join(os.path.dirname(__file__), 'locales'),
# fallback=True
# )
# def _(timezone_name):
# """Translate a timezone name using the current locale, returning Unicode"""
# return t.ugettext(timezone_name)
_tzinfo_cache = {}
def timezone(zone):
r''' Return a datetime.tzinfo implementation for the given timezone
>>> from datetime import datetime, timedelta
>>> utc = timezone('UTC')
>>> eastern = timezone('US/Eastern')
>>> eastern.zone
'US/Eastern'
>>> timezone(unicode('US/Eastern')) is eastern
True
>>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
>>> loc_dt = utc_dt.astimezone(eastern)
>>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
>>> loc_dt.strftime(fmt)
'2002-10-27 01:00:00 EST (-0500)'
>>> (loc_dt - timedelta(minutes=10)).strftime(fmt)
'2002-10-27 00:50:00 EST (-0500)'
>>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt)
'2002-10-27 01:50:00 EDT (-0400)'
>>> (loc_dt + timedelta(minutes=10)).strftime(fmt)
'2002-10-27 01:10:00 EST (-0500)'
Raises UnknownTimeZoneError if passed an unknown zone.
>>> try:
... timezone('Asia/Shangri-La')
... except UnknownTimeZoneError:
... print('Unknown')
Unknown
>>> try:
... timezone(unicode('\N{TRADE MARK SIGN}'))
... except UnknownTimeZoneError:
... print('Unknown')
Unknown
'''
if zone.upper() == 'UTC':
return utc
try:
zone = ascii(zone)
except UnicodeEncodeError:
# All valid timezones are ASCII
raise UnknownTimeZoneError(zone)
zone = _unmunge_zone(zone)
if zone not in _tzinfo_cache:
if zone in all_timezones_set:
fp = open_resource(zone)
try:
_tzinfo_cache[zone] = build_tzinfo(zone, fp)
finally:
fp.close()
else:
raise UnknownTimeZoneError(zone)
return _tzinfo_cache[zone]
def _unmunge_zone(zone):
"""Undo the time zone name munging done by older versions of pytz."""
return zone.replace('_plus_', '+').replace('_minus_', '-')
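# e.g. _unmunge_zone('Etc/GMT_plus_5') -> 'Etc/GMT+5'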
ZERO = datetime.timedelta(0)
HOUR = datetime.timedelta(hours=1)
class UTC(datetime.tzinfo):
"""UTC
Optimized UTC implementation. It unpickles using the single module global
instance defined beneath this class declaration.
"""
zone = "UTC"
_utcoffset = ZERO
_dst = ZERO
_tzname = zone
def fromutc(self, dt):
if dt.tzinfo is None:
return self.localize(dt)
return super(utc.__class__, self).fromutc(dt)
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
def __reduce__(self):
return _UTC, ()
def localize(self, dt, is_dst=False):
'''Convert naive time to local time'''
if dt.tzinfo is not None:
raise ValueError('Not naive datetime (tzinfo is already set)')
return dt.replace(tzinfo=self)
def normalize(self, dt, is_dst=False):
'''Correct the timezone information on the given datetime'''
if dt.tzinfo is self:
return dt
if dt.tzinfo is None:
raise ValueError('Naive time - no tzinfo set')
return dt.astimezone(self)
def __repr__(self):
return "<UTC>"
def __str__(self):
return "UTC"
UTC = utc = UTC() # UTC is a singleton
def _UTC():
"""Factory function for utc unpickling.
Makes sure that unpickling a utc instance always returns the same
module global.
These examples belong in the UTC class above, but it is obscured; or in
the README.txt, but we are not depending on Python 2.4 so integrating
the README.txt examples with the unit tests is not trivial.
>>> import datetime, pickle
>>> dt = datetime.datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc)
>>> naive = dt.replace(tzinfo=None)
>>> p = pickle.dumps(dt, 1)
>>> naive_p = pickle.dumps(naive, 1)
>>> len(p) - len(naive_p)
17
>>> new = pickle.loads(p)
>>> new == dt
True
>>> new is dt
False
>>> new.tzinfo is dt.tzinfo
True
>>> utc is UTC is timezone('UTC')
True
>>> utc is timezone('GMT')
False
"""
return utc
_UTC.__safe_for_unpickling__ = True
def _p(*args):
"""Factory function for unpickling pytz tzinfo instances.
Just a wrapper around tzinfo.unpickler to save a few bytes in each pickle
by shortening the path.
"""
return unpickler(*args)
_p.__safe_for_unpickling__ = True
class _CountryTimezoneDict(LazyDict):
"""Map ISO 3166 country code to a list of timezone names commonly used
in that country.
iso3166_code is the two letter code used to identify the country.
>>> def print_list(list_of_strings):
... 'We use a helper so doctests work under Python 2.3 -> 3.x'
... for s in list_of_strings:
... print(s)
>>> print_list(country_timezones['nz'])
Pacific/Auckland
Pacific/Chatham
>>> print_list(country_timezones['ch'])
Europe/Zurich
>>> print_list(country_timezones['CH'])
Europe/Zurich
>>> print_list(country_timezones[unicode('ch')])
Europe/Zurich
>>> print_list(country_timezones['XXX'])
Traceback (most recent call last):
...
KeyError: 'XXX'
Previously, this information was exposed as a function rather than a
dictionary. This is still supported::
>>> print_list(country_timezones('nz'))
Pacific/Auckland
Pacific/Chatham
"""
def __call__(self, iso3166_code):
"""Backwards compatibility."""
return self[iso3166_code]
def _fill(self):
data = {}
zone_tab = open_resource('zone.tab')
try:
for line in zone_tab:
line = line.decode('US-ASCII')
if line.startswith('#'):
continue
code, coordinates, zone = line.split(None, 4)[:3]
if zone not in all_timezones_set:
continue
try:
data[code].append(zone)
except KeyError:
data[code] = [zone]
self.data = data
finally:
zone_tab.close()
country_timezones = _CountryTimezoneDict()
class _CountryNameDict(LazyDict):
    '''Dictionary providing ISO3166 code -> English name.
>>> print(country_names['au'])
Australia
'''
def _fill(self):
data = {}
zone_tab = open_resource('iso3166.tab')
try:
for line in zone_tab.readlines():
line = line.decode('US-ASCII')
if line.startswith('#'):
continue
code, name = line.split(None, 1)
data[code] = name.strip()
self.data = data
finally:
zone_tab.close()
country_names = _CountryNameDict()
# Time-zone info based solely on fixed offsets
class _FixedOffset(datetime.tzinfo):
zone = None # to match the standard pytz API
def __init__(self, minutes):
if abs(minutes) >= 1440:
raise ValueError("absolute offset is too large", minutes)
self._minutes = minutes
self._offset = datetime.timedelta(minutes=minutes)
def utcoffset(self, dt):
return self._offset
def __reduce__(self):
return FixedOffset, (self._minutes, )
def dst(self, dt):
return ZERO
def tzname(self, dt):
return None
def __repr__(self):
return 'pytz.FixedOffset(%d)' % self._minutes
def localize(self, dt, is_dst=False):
'''Convert naive time to local time'''
if dt.tzinfo is not None:
raise ValueError('Not naive datetime (tzinfo is already set)')
return dt.replace(tzinfo=self)
def normalize(self, dt, is_dst=False):
'''Correct the timezone information on the given datetime'''
if dt.tzinfo is None:
raise ValueError('Naive time - no tzinfo set')
return dt.replace(tzinfo=self)
def FixedOffset(offset, _tzinfos = {}):
"""return a fixed-offset timezone based off a number of minutes.
>>> one = FixedOffset(-330)
>>> one
pytz.FixedOffset(-330)
>>> one.utcoffset(datetime.datetime.now())
datetime.timedelta(-1, 66600)
>>> one.dst(datetime.datetime.now())
datetime.timedelta(0)
>>> two = FixedOffset(1380)
>>> two
pytz.FixedOffset(1380)
>>> two.utcoffset(datetime.datetime.now())
datetime.timedelta(0, 82800)
>>> two.dst(datetime.datetime.now())
datetime.timedelta(0)
The datetime.timedelta must be between the range of -1 and 1 day,
non-inclusive.
>>> FixedOffset(1440)
Traceback (most recent call last):
...
ValueError: ('absolute offset is too large', 1440)
>>> FixedOffset(-1440)
Traceback (most recent call last):
...
ValueError: ('absolute offset is too large', -1440)
An offset of 0 is special-cased to return UTC.
>>> FixedOffset(0) is UTC
True
There should always be only one instance of a FixedOffset per timedelta.
This should be true for multiple creation calls.
>>> FixedOffset(-330) is one
True
>>> FixedOffset(1380) is two
True
It should also be true for pickling.
>>> import pickle
>>> pickle.loads(pickle.dumps(one)) is one
True
>>> pickle.loads(pickle.dumps(two)) is two
True
"""
if offset == 0:
return UTC
info = _tzinfos.get(offset)
if info is None:
# We haven't seen this one before. we need to save it.
# Use setdefault to avoid a race condition and make sure we have
# only one
info = _tzinfos.setdefault(offset, _FixedOffset(offset))
return info
FixedOffset.__safe_for_unpickling__ = True
def _test():
import doctest, os, sys
sys.path.insert(0, os.pardir)
import pytz
return doctest.testmod(pytz)
if __name__ == '__main__':
_test()
all_timezones = \
['Africa/Abidjan',
'Africa/Accra',
'Africa/Addis_Ababa',
'Africa/Algiers',
'Africa/Asmara',
'Africa/Asmera',
'Africa/Bamako',
'Africa/Bangui',
'Africa/Banjul',
'Africa/Bissau',
'Africa/Blantyre',
'Africa/Brazzaville',
'Africa/Bujumbura',
'Africa/Cairo',
'Africa/Casablanca',
'Africa/Ceuta',
'Africa/Conakry',
'Africa/Dakar',
'Africa/Dar_es_Salaam',
'Africa/Djibouti',
'Africa/Douala',
'Africa/El_Aaiun',
'Africa/Freetown',
'Africa/Gaborone',
'Africa/Harare',
'Africa/Johannesburg',
'Africa/Juba',
'Africa/Kampala',
'Africa/Khartoum',
'Africa/Kigali',
'Africa/Kinshasa',
'Africa/Lagos',
'Africa/Libreville',
'Africa/Lome',
'Africa/Luanda',
'Africa/Lubumbashi',
'Africa/Lusaka',
'Africa/Malabo',
'Africa/Maputo',
'Africa/Maseru',
'Africa/Mbabane',
'Africa/Mogadishu',
'Africa/Monrovia',
'Africa/Nairobi',
'Africa/Ndjamena',
'Africa/Niamey',
'Africa/Nouakchott',
'Africa/Ouagadougou',
'Africa/Porto-Novo',
'Africa/Sao_Tome',
'Africa/Timbuktu',
'Africa/Tripoli',
'Africa/Tunis',
'Africa/Windhoek',
'America/Adak',
'America/Anchorage',
'America/Anguilla',
'America/Antigua',
'America/Araguaina',
'America/Argentina/Buenos_Aires',
'America/Argentina/Catamarca',
'America/Argentina/ComodRivadavia',
'America/Argentina/Cordoba',
'America/Argentina/Jujuy',
'America/Argentina/La_Rioja',
'America/Argentina/Mendoza',
'America/Argentina/Rio_Gallegos',
'America/Argentina/Salta',
'America/Argentina/San_Juan',
'America/Argentina/San_Luis',
'America/Argentina/Tucuman',
'America/Argentina/Ushuaia',
'America/Aruba',
'America/Asuncion',
'America/Atikokan',
'America/Atka',
'America/Bahia',
'America/Bahia_Banderas',
'America/Barbados',
'America/Belem',
'America/Belize',
'America/Blanc-Sablon',
'America/Boa_Vista',
'America/Bogota',
'America/Boise',
'America/Buenos_Aires',
'America/Cambridge_Bay',
'America/Campo_Grande',
'America/Cancun',
'America/Caracas',
'America/Catamarca',
'America/Cayenne',
'America/Cayman',
'America/Chicago',
'America/Chihuahua',
'America/Coral_Harbour',
'America/Cordoba',
'America/Costa_Rica',
'America/Creston',
'America/Cuiaba',
'America/Curacao',
'America/Danmarkshavn',
'America/Dawson',
'America/Dawson_Creek',
'America/Denver',
'America/Detroit',
'America/Dominica',
'America/Edmonton',
'America/Eirunepe',
'America/El_Salvador',
'America/Ensenada',
'America/Fort_Wayne',
'America/Fortaleza',
'America/Glace_Bay',
'America/Godthab',
'America/Goose_Bay',
'America/Grand_Turk',
'America/Grenada',
'America/Guadeloupe',
'America/Guatemala',
'America/Guayaquil',
'America/Guyana',
'America/Halifax',
'America/Havana',
'America/Hermosillo',
'America/Indiana/Indianapolis',
'America/Indiana/Knox',
'America/Indiana/Marengo',
'America/Indiana/Petersburg',
'America/Indiana/Tell_City',
'America/Indiana/Vevay',
'America/Indiana/Vincennes',
'America/Indiana/Winamac',
'America/Indianapolis',
'America/Inuvik',
'America/Iqaluit',
'America/Jamaica',
'America/Jujuy',
'America/Juneau',
'America/Kentucky/Louisville',
'America/Kentucky/Monticello',
'America/Knox_IN',
'America/Kralendijk',
'America/La_Paz',
'America/Lima',
'America/Los_Angeles',
'America/Louisville',
'America/Lower_Princes',
'America/Maceio',
'America/Managua',
'America/Manaus',
'America/Marigot',
'America/Martinique',
'America/Matamoros',
'America/Mazatlan',
'America/Mendoza',
'America/Menominee',
'America/Merida',
'America/Metlakatla',
'America/Mexico_City',
'America/Miquelon',
'America/Moncton',
'America/Monterrey',
'America/Montevideo',
'America/Montreal',
'America/Montserrat',
'America/Nassau',
'America/New_York',
'America/Nipigon',
'America/Nome',
'America/Noronha',
'America/North_Dakota/Beulah',
'America/North_Dakota/Center',
'America/North_Dakota/New_Salem',
'America/Ojinaga',
'America/Panama',
'America/Pangnirtung',
'America/Paramaribo',
'America/Phoenix',
'America/Port-au-Prince',
'America/Port_of_Spain',
'America/Porto_Acre',
'America/Porto_Velho',
'America/Puerto_Rico',
'America/Rainy_River',
'America/Rankin_Inlet',
'America/Recife',
'America/Regina',
'America/Resolute',
'America/Rio_Branco',
'America/Rosario',
'America/Santa_Isabel',
'America/Santarem',
'America/Santiago',
'America/Santo_Domingo',
'America/Sao_Paulo',
'America/Scoresbysund',
'America/Shiprock',
'America/Sitka',
'America/St_Barthelemy',
'America/St_Johns',
'America/St_Kitts',
'America/St_Lucia',
'America/St_Thomas',
'America/St_Vincent',
'America/Swift_Current',
'America/Tegucigalpa',
'America/Thule',
'America/Thunder_Bay',
'America/Tijuana',
'America/Toronto',
'America/Tortola',
'America/Vancouver',
'America/Virgin',
'America/Whitehorse',
'America/Winnipeg',
'America/Yakutat',
'America/Yellowknife',
'Antarctica/Casey',
'Antarctica/Davis',
'Antarctica/DumontDUrville',
'Antarctica/Macquarie',
'Antarctica/Mawson',
'Antarctica/McMurdo',
'Antarctica/Palmer',
'Antarctica/Rothera',
'Antarctica/South_Pole',
'Antarctica/Syowa',
'Antarctica/Troll',
'Antarctica/Vostok',
'Arctic/Longyearbyen',
'Asia/Aden',
'Asia/Almaty',
'Asia/Amman',
'Asia/Anadyr',
'Asia/Aqtau',
'Asia/Aqtobe',
'Asia/Ashgabat',
'Asia/Ashkhabad',
'Asia/Baghdad',
'Asia/Bahrain',
'Asia/Baku',
'Asia/Bangkok',
'Asia/Beirut',
'Asia/Bishkek',
'Asia/Brunei',
'Asia/Calcutta',
'Asia/Chita',
'Asia/Choibalsan',
'Asia/Chongqing',
'Asia/Chungking',
'Asia/Colombo',
'Asia/Dacca',
'Asia/Damascus',
'Asia/Dhaka',
'Asia/Dili',
'Asia/Dubai',
'Asia/Dushanbe',
'Asia/Gaza',
'Asia/Harbin',
'Asia/Hebron',
'Asia/Ho_Chi_Minh',
'Asia/Hong_Kong',
'Asia/Hovd',
'Asia/Irkutsk',
'Asia/Istanbul',
'Asia/Jakarta',
'Asia/Jayapura',
'Asia/Jerusalem',
'Asia/Kabul',
'Asia/Kamchatka',
'Asia/Karachi',
'Asia/Kashgar',
'Asia/Kathmandu',
'Asia/Katmandu',
'Asia/Khandyga',
'Asia/Kolkata',
'Asia/Krasnoyarsk',
'Asia/Kuala_Lumpur',
'Asia/Kuching',
'Asia/Kuwait',
'Asia/Macao',
'Asia/Macau',
'Asia/Magadan',
'Asia/Makassar',
'Asia/Manila',
'Asia/Muscat',
'Asia/Nicosia',
'Asia/Novokuznetsk',
'Asia/Novosibirsk',
'Asia/Omsk',
'Asia/Oral',
'Asia/Phnom_Penh',
'Asia/Pontianak',
'Asia/Pyongyang',
'Asia/Qatar',
'Asia/Qyzylorda',
'Asia/Rangoon',
'Asia/Riyadh',
'Asia/Saigon',
'Asia/Sakhalin',
'Asia/Samarkand',
'Asia/Seoul',
'Asia/Shanghai',
'Asia/Singapore',
'Asia/Srednekolymsk',
'Asia/Taipei',
'Asia/Tashkent',
'Asia/Tbilisi',
'Asia/Tehran',
'Asia/Tel_Aviv',
'Asia/Thimbu',
'Asia/Thimphu',
'Asia/Tokyo',
'Asia/Ujung_Pandang',
'Asia/Ulaanbaatar',
'Asia/Ulan_Bator',
'Asia/Urumqi',
'Asia/Ust-Nera',
'Asia/Vientiane',
'Asia/Vladivostok',
'Asia/Yakutsk',
'Asia/Yekaterinburg',
'Asia/Yerevan',
'Atlantic/Azores',
'Atlantic/Bermuda',
'Atlantic/Canary',
'Atlantic/Cape_Verde',
'Atlantic/Faeroe',
'Atlantic/Faroe',
'Atlantic/Jan_Mayen',
'Atlantic/Madeira',
'Atlantic/Reykjavik',
'Atlantic/South_Georgia',
'Atlantic/St_Helena',
'Atlantic/Stanley',
'Australia/ACT',
'Australia/Adelaide',
'Australia/Brisbane',
'Australia/Broken_Hill',
'Australia/Canberra',
'Australia/Currie',
'Australia/Darwin',
'Australia/Eucla',
'Australia/Hobart',
'Australia/LHI',
'Australia/Lindeman',
'Australia/Lord_Howe',
'Australia/Melbourne',
'Australia/NSW',
'Australia/North',
'Australia/Perth',
'Australia/Queensland',
'Australia/South',
'Australia/Sydney',
'Australia/Tasmania',
'Australia/Victoria',
'Australia/West',
'Australia/Yancowinna',
'Brazil/Acre',
'Brazil/DeNoronha',
'Brazil/East',
'Brazil/West',
'CET',
'CST6CDT',
'Canada/Atlantic',
'Canada/Central',
'Canada/East-Saskatchewan',
'Canada/Eastern',
'Canada/Mountain',
'Canada/Newfoundland',
'Canada/Pacific',
'Canada/Saskatchewan',
'Canada/Yukon',
'Chile/Continental',
'Chile/EasterIsland',
'Cuba',
'EET',
'EST',
'EST5EDT',
'Egypt',
'Eire',
'Etc/GMT',
'Etc/GMT+0',
'Etc/GMT+1',
'Etc/GMT+10',
'Etc/GMT+11',
'Etc/GMT+12',
'Etc/GMT+2',
'Etc/GMT+3',
'Etc/GMT+4',
'Etc/GMT+5',
'Etc/GMT+6',
'Etc/GMT+7',
'Etc/GMT+8',
'Etc/GMT+9',
'Etc/GMT-0',
'Etc/GMT-1',
'Etc/GMT-10',
'Etc/GMT-11',
'Etc/GMT-12',
'Etc/GMT-13',
'Etc/GMT-14',
'Etc/GMT-2',
'Etc/GMT-3',
'Etc/GMT-4',
'Etc/GMT-5',
'Etc/GMT-6',
'Etc/GMT-7',
'Etc/GMT-8',
'Etc/GMT-9',
'Etc/GMT0',
'Etc/Greenwich',
'Etc/UCT',
'Etc/UTC',
'Etc/Universal',
'Etc/Zulu',
'Europe/Amsterdam',
'Europe/Andorra',
'Europe/Athens',
'Europe/Belfast',
'Europe/Belgrade',
'Europe/Berlin',
'Europe/Bratislava',
'Europe/Brussels',
'Europe/Bucharest',
'Europe/Budapest',
'Europe/Busingen',
'Europe/Chisinau',
'Europe/Copenhagen',
'Europe/Dublin',
'Europe/Gibraltar',
'Europe/Guernsey',
'Europe/Helsinki',
'Europe/Isle_of_Man',
'Europe/Istanbul',
'Europe/Jersey',
'Europe/Kaliningrad',
'Europe/Kiev',
'Europe/Lisbon',
'Europe/Ljubljana',
'Europe/London',
'Europe/Luxembourg',
'Europe/Madrid',
'Europe/Malta',
'Europe/Mariehamn',
'Europe/Minsk',
'Europe/Monaco',
'Europe/Moscow',
'Europe/Nicosia',
'Europe/Oslo',
'Europe/Paris',
'Europe/Podgorica',
'Europe/Prague',
'Europe/Riga',
'Europe/Rome',
'Europe/Samara',
'Europe/San_Marino',
'Europe/Sarajevo',
'Europe/Simferopol',
'Europe/Skopje',
'Europe/Sofia',
'Europe/Stockholm',
'Europe/Tallinn',
'Europe/Tirane',
'Europe/Tiraspol',
'Europe/Uzhgorod',
'Europe/Vaduz',
'Europe/Vatican',
'Europe/Vienna',
'Europe/Vilnius',
'Europe/Volgograd',
'Europe/Warsaw',
'Europe/Zagreb',
'Europe/Zaporozhye',
'Europe/Zurich',
'GB',
'GB-Eire',
'GMT',
'GMT+0',
'GMT-0',
'GMT0',
'Greenwich',
'HST',
'Hongkong',
'Iceland',
'Indian/Antananarivo',
'Indian/Chagos',
'Indian/Christmas',
'Indian/Cocos',
'Indian/Comoro',
'Indian/Kerguelen',
'Indian/Mahe',
'Indian/Maldives',
'Indian/Mauritius',
'Indian/Mayotte',
'Indian/Reunion',
'Iran',
'Israel',
'Jamaica',
'Japan',
'Kwajalein',
'Libya',
'MET',
'MST',
'MST7MDT',
'Mexico/BajaNorte',
'Mexico/BajaSur',
'Mexico/General',
'NZ',
'NZ-CHAT',
'Navajo',
'PRC',
'PST8PDT',
'Pacific/Apia',
'Pacific/Auckland',
'Pacific/Bougainville',
'Pacific/Chatham',
'Pacific/Chuuk',
'Pacific/Easter',
'Pacific/Efate',
'Pacific/Enderbury',
'Pacific/Fakaofo',
'Pacific/Fiji',
'Pacific/Funafuti',
'Pacific/Galapagos',
'Pacific/Gambier',
'Pacific/Guadalcanal',
'Pacific/Guam',
'Pacific/Honolulu',
'Pacific/Johnston',
'Pacific/Kiritimati',
'Pacific/Kosrae',
'Pacific/Kwajalein',
'Pacific/Majuro',
'Pacific/Marquesas',
'Pacific/Midway',
'Pacific/Nauru',
'Pacific/Niue',
'Pacific/Norfolk',
'Pacific/Noumea',
'Pacific/Pago_Pago',
'Pacific/Palau',
'Pacific/Pitcairn',
'Pacific/Pohnpei',
'Pacific/Ponape',
'Pacific/Port_Moresby',
'Pacific/Rarotonga',
'Pacific/Saipan',
'Pacific/Samoa',
'Pacific/Tahiti',
'Pacific/Tarawa',
'Pacific/Tongatapu',
'Pacific/Truk',
'Pacific/Wake',
'Pacific/Wallis',
'Pacific/Yap',
'Poland',
'Portugal',
'ROC',
'ROK',
'Singapore',
'Turkey',
'UCT',
'US/Alaska',
'US/Aleutian',
'US/Arizona',
'US/Central',
'US/East-Indiana',
'US/Eastern',
'US/Hawaii',
'US/Indiana-Starke',
'US/Michigan',
'US/Mountain',
'US/Pacific',
'US/Pacific-New',
'US/Samoa',
'UTC',
'Universal',
'W-SU',
'WET',
'Zulu']
all_timezones = LazyList(
tz for tz in all_timezones if resource_exists(tz))
all_timezones_set = LazySet(all_timezones)
common_timezones = \
['Africa/Abidjan',
'Africa/Accra',
'Africa/Addis_Ababa',
'Africa/Algiers',
'Africa/Asmara',
'Africa/Bamako',
'Africa/Bangui',
'Africa/Banjul',
'Africa/Bissau',
'Africa/Blantyre',
'Africa/Brazzaville',
'Africa/Bujumbura',
'Africa/Cairo',
'Africa/Casablanca',
'Africa/Ceuta',
'Africa/Conakry',
'Africa/Dakar',
'Africa/Dar_es_Salaam',
'Africa/Djibouti',
'Africa/Douala',
'Africa/El_Aaiun',
'Africa/Freetown',
'Africa/Gaborone',
'Africa/Harare',
'Africa/Johannesburg',
'Africa/Juba',
'Africa/Kampala',
'Africa/Khartoum',
'Africa/Kigali',
'Africa/Kinshasa',
'Africa/Lagos',
'Africa/Libreville',
'Africa/Lome',
'Africa/Luanda',
'Africa/Lubumbashi',
'Africa/Lusaka',
'Africa/Malabo',
'Africa/Maputo',
'Africa/Maseru',
'Africa/Mbabane',
'Africa/Mogadishu',
'Africa/Monrovia',
'Africa/Nairobi',
'Africa/Ndjamena',
'Africa/Niamey',
'Africa/Nouakchott',
'Africa/Ouagadougou',
'Africa/Porto-Novo',
'Africa/Sao_Tome',
'Africa/Tripoli',
'Africa/Tunis',
'Africa/Windhoek',
'America/Adak',
'America/Anchorage',
'America/Anguilla',
'America/Antigua',
'America/Araguaina',
'America/Argentina/Buenos_Aires',
'America/Argentina/Catamarca',
'America/Argentina/Cordoba',
'America/Argentina/Jujuy',
'America/Argentina/La_Rioja',
'America/Argentina/Mendoza',
'America/Argentina/Rio_Gallegos',
'America/Argentina/Salta',
'America/Argentina/San_Juan',
'America/Argentina/San_Luis',
'America/Argentina/Tucuman',
'America/Argentina/Ushuaia',
'America/Aruba',
'America/Asuncion',
'America/Atikokan',
'America/Bahia',
'America/Bahia_Banderas',
'America/Barbados',
'America/Belem',
'America/Belize',
'America/Blanc-Sablon',
'America/Boa_Vista',
'America/Bogota',
'America/Boise',
'America/Cambridge_Bay',
'America/Campo_Grande',
'America/Cancun',
'America/Caracas',
'America/Cayenne',
'America/Cayman',
'America/Chicago',
'America/Chihuahua',
'America/Costa_Rica',
'America/Creston',
'America/Cuiaba',
'America/Curacao',
'America/Danmarkshavn',
'America/Dawson',
'America/Dawson_Creek',
'America/Denver',
'America/Detroit',
'America/Dominica',
'America/Edmonton',
'America/Eirunepe',
'America/El_Salvador',
'America/Fortaleza',
'America/Glace_Bay',
'America/Godthab',
'America/Goose_Bay',
'America/Grand_Turk',
'America/Grenada',
'America/Guadeloupe',
'America/Guatemala',
'America/Guayaquil',
'America/Guyana',
'America/Halifax',
'America/Havana',
'America/Hermosillo',
'America/Indiana/Indianapolis',
'America/Indiana/Knox',
'America/Indiana/Marengo',
'America/Indiana/Petersburg',
'America/Indiana/Tell_City',
'America/Indiana/Vevay',
'America/Indiana/Vincennes',
'America/Indiana/Winamac',
'America/Inuvik',
'America/Iqaluit',
'America/Jamaica',
'America/Juneau',
'America/Kentucky/Louisville',
'America/Kentucky/Monticello',
'America/Kralendijk',
'America/La_Paz',
'America/Lima',
'America/Los_Angeles',
'America/Lower_Princes',
'America/Maceio',
'America/Managua',
'America/Manaus',
'America/Marigot',
'America/Martinique',
'America/Matamoros',
'America/Mazatlan',
'America/Menominee',
'America/Merida',
'America/Metlakatla',
'America/Mexico_City',
'America/Miquelon',
'America/Moncton',
'America/Monterrey',
'America/Montevideo',
'America/Montreal',
'America/Montserrat',
'America/Nassau',
'America/New_York',
'America/Nipigon',
'America/Nome',
'America/Noronha',
'America/North_Dakota/Beulah',
'America/North_Dakota/Center',
'America/North_Dakota/New_Salem',
'America/Ojinaga',
'America/Panama',
'America/Pangnirtung',
'America/Paramaribo',
'America/Phoenix',
'America/Port-au-Prince',
'America/Port_of_Spain',
'America/Porto_Velho',
'America/Puerto_Rico',
'America/Rainy_River',
'America/Rankin_Inlet',
'America/Recife',
'America/Regina',
'America/Resolute',
'America/Rio_Branco',
'America/Santa_Isabel',
'America/Santarem',
'America/Santiago',
'America/Santo_Domingo',
'America/Sao_Paulo',
'America/Scoresbysund',
'America/Sitka',
'America/St_Barthelemy',
'America/St_Johns',
'America/St_Kitts',
'America/St_Lucia',
'America/St_Thomas',
'America/St_Vincent',
'America/Swift_Current',
'America/Tegucigalpa',
'America/Thule',
'America/Thunder_Bay',
'America/Tijuana',
'America/Toronto',
'America/Tortola',
'America/Vancouver',
'America/Whitehorse',
'America/Winnipeg',
'America/Yakutat',
'America/Yellowknife',
'Antarctica/Casey',
'Antarctica/Davis',
'Antarctica/DumontDUrville',
'Antarctica/Macquarie',
'Antarctica/Mawson',
'Antarctica/McMurdo',
'Antarctica/Palmer',
'Antarctica/Rothera',
'Antarctica/Syowa',
'Antarctica/Troll',
'Antarctica/Vostok',
'Arctic/Longyearbyen',
'Asia/Aden',
'Asia/Almaty',
'Asia/Amman',
'Asia/Anadyr',
'Asia/Aqtau',
'Asia/Aqtobe',
'Asia/Ashgabat',
'Asia/Baghdad',
'Asia/Bahrain',
'Asia/Baku',
'Asia/Bangkok',
'Asia/Beirut',
'Asia/Bishkek',
'Asia/Brunei',
'Asia/Chita',
'Asia/Choibalsan',
'Asia/Colombo',
'Asia/Damascus',
'Asia/Dhaka',
'Asia/Dili',
'Asia/Dubai',
'Asia/Dushanbe',
'Asia/Gaza',
'Asia/Hebron',
'Asia/Ho_Chi_Minh',
'Asia/Hong_Kong',
'Asia/Hovd',
'Asia/Irkutsk',
'Asia/Jakarta',
'Asia/Jayapura',
'Asia/Jerusalem',
'Asia/Kabul',
'Asia/Kamchatka',
'Asia/Karachi',
'Asia/Kathmandu',
'Asia/Khandyga',
'Asia/Kolkata',
'Asia/Krasnoyarsk',
'Asia/Kuala_Lumpur',
'Asia/Kuching',
'Asia/Kuwait',
'Asia/Macau',
'Asia/Magadan',
'Asia/Makassar',
'Asia/Manila',
'Asia/Muscat',
'Asia/Nicosia',
'Asia/Novokuznetsk',
'Asia/Novosibirsk',
'Asia/Omsk',
'Asia/Oral',
'Asia/Phnom_Penh',
'Asia/Pontianak',
'Asia/Pyongyang',
'Asia/Qatar',
'Asia/Qyzylorda',
'Asia/Rangoon',
'Asia/Riyadh',
'Asia/Sakhalin',
'Asia/Samarkand',
'Asia/Seoul',
'Asia/Shanghai',
'Asia/Singapore',
'Asia/Srednekolymsk',
'Asia/Taipei',
'Asia/Tashkent',
'Asia/Tbilisi',
'Asia/Tehran',
'Asia/Thimphu',
'Asia/Tokyo',
'Asia/Ulaanbaatar',
'Asia/Urumqi',
'Asia/Ust-Nera',
'Asia/Vientiane',
'Asia/Vladivostok',
'Asia/Yakutsk',
'Asia/Yekaterinburg',
'Asia/Yerevan',
'Atlantic/Azores',
'Atlantic/Bermuda',
'Atlantic/Canary',
'Atlantic/Cape_Verde',
'Atlantic/Faroe',
'Atlantic/Madeira',
'Atlantic/Reykjavik',
'Atlantic/South_Georgia',
'Atlantic/St_Helena',
'Atlantic/Stanley',
'Australia/Adelaide',
'Australia/Brisbane',
'Australia/Broken_Hill',
'Australia/Currie',
'Australia/Darwin',
'Australia/Eucla',
'Australia/Hobart',
'Australia/Lindeman',
'Australia/Lord_Howe',
'Australia/Melbourne',
'Australia/Perth',
'Australia/Sydney',
'Canada/Atlantic',
'Canada/Central',
'Canada/Eastern',
'Canada/Mountain',
'Canada/Newfoundland',
'Canada/Pacific',
'Europe/Amsterdam',
'Europe/Andorra',
'Europe/Athens',
'Europe/Belgrade',
'Europe/Berlin',
'Europe/Bratislava',
'Europe/Brussels',
'Europe/Bucharest',
'Europe/Budapest',
'Europe/Busingen',
'Europe/Chisinau',
'Europe/Copenhagen',
'Europe/Dublin',
'Europe/Gibraltar',
'Europe/Guernsey',
'Europe/Helsinki',
'Europe/Isle_of_Man',
'Europe/Istanbul',
'Europe/Jersey',
'Europe/Kaliningrad',
'Europe/Kiev',
'Europe/Lisbon',
'Europe/Ljubljana',
'Europe/London',
'Europe/Luxembourg',
'Europe/Madrid',
'Europe/Malta',
'Europe/Mariehamn',
'Europe/Minsk',
'Europe/Monaco',
'Europe/Moscow',
'Europe/Oslo',
'Europe/Paris',
'Europe/Podgorica',
'Europe/Prague',
'Europe/Riga',
'Europe/Rome',
'Europe/Samara',
'Europe/San_Marino',
'Europe/Sarajevo',
'Europe/Simferopol',
'Europe/Skopje',
'Europe/Sofia',
'Europe/Stockholm',
'Europe/Tallinn',
'Europe/Tirane',
'Europe/Uzhgorod',
'Europe/Vaduz',
'Europe/Vatican',
'Europe/Vienna',
'Europe/Vilnius',
'Europe/Volgograd',
'Europe/Warsaw',
'Europe/Zagreb',
'Europe/Zaporozhye',
'Europe/Zurich',
'GMT',
'Indian/Antananarivo',
'Indian/Chagos',
'Indian/Christmas',
'Indian/Cocos',
'Indian/Comoro',
'Indian/Kerguelen',
'Indian/Mahe',
'Indian/Maldives',
'Indian/Mauritius',
'Indian/Mayotte',
'Indian/Reunion',
'Pacific/Apia',
'Pacific/Auckland',
'Pacific/Bougainville',
'Pacific/Chatham',
'Pacific/Chuuk',
'Pacific/Easter',
'Pacific/Efate',
'Pacific/Enderbury',
'Pacific/Fakaofo',
'Pacific/Fiji',
'Pacific/Funafuti',
'Pacific/Galapagos',
'Pacific/Gambier',
'Pacific/Guadalcanal',
'Pacific/Guam',
'Pacific/Honolulu',
'Pacific/Johnston',
'Pacific/Kiritimati',
'Pacific/Kosrae',
'Pacific/Kwajalein',
'Pacific/Majuro',
'Pacific/Marquesas',
'Pacific/Midway',
'Pacific/Nauru',
'Pacific/Niue',
'Pacific/Norfolk',
'Pacific/Noumea',
'Pacific/Pago_Pago',
'Pacific/Palau',
'Pacific/Pitcairn',
'Pacific/Pohnpei',
'Pacific/Port_Moresby',
'Pacific/Rarotonga',
'Pacific/Saipan',
'Pacific/Tahiti',
'Pacific/Tarawa',
'Pacific/Tongatapu',
'Pacific/Wake',
'Pacific/Wallis',
'US/Alaska',
'US/Arizona',
'US/Central',
'US/Eastern',
'US/Hawaii',
'US/Mountain',
'US/Pacific',
'UTC']
common_timezones = LazyList(
tz for tz in common_timezones if tz in all_timezones)
common_timezones_set = LazySet(common_timezones)
| gpl-3.0 | -7,961,348,532,706,557,000 | 21.47918 | 81 | 0.652583 | false |
shepdelacreme/ansible | lib/ansible/modules/network/aos/_aos_login.py | 44 | 4110 | #!/usr/bin/python
#
# (c) 2017 Apstra Inc, <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aos_login
author: [email protected] (@jeremyschulman)
version_added: "2.3"
short_description: Login to AOS server for session token
deprecated:
removed_in: "2.9"
why: This module does not support AOS 2.1 or later
alternative: See new modules at U(https://www.ansible.com/ansible-apstra).
description:
- Obtain the AOS server session token by providing the required
username and password credentials. Upon successful authentication,
this module will return the session-token that is required by all
subsequent AOS module usage. On success the module will automatically populate
ansible facts with the variable I(aos_session)
    This module is not idempotent and does not support check mode.
requirements:
- "aos-pyez >= 0.6.1"
options:
server:
description:
- Address of the AOS Server on which you want to open a connection.
required: true
port:
description:
- Port number to use when connecting to the AOS server.
default: 443
user:
description:
- Login username to use when connecting to the AOS server.
default: admin
passwd:
description:
- Password to use when connecting to the AOS server.
default: admin
'''
EXAMPLES = '''
- name: Create a session with the AOS-server
aos_login:
server: "{{ inventory_hostname }}"
user: admin
passwd: admin
- name: Use the newly created session (register is not mandatory)
aos_ip_pool:
session: "{{ aos_session }}"
name: my_ip_pool
state: present
'''
RETURNS = '''
aos_session:
description: Authenticated session information
returned: always
type: dict
sample: { 'url': <str>, 'headers': {...} }
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aos.aos import check_aos_version
try:
from apstra.aosom.session import Session
import apstra.aosom.exc as aosExc
HAS_AOS_PYEZ = True
except ImportError:
HAS_AOS_PYEZ = False
def aos_login(module):
mod_args = module.params
aos = Session(server=mod_args['server'], port=mod_args['port'],
user=mod_args['user'], passwd=mod_args['passwd'])
try:
aos.login()
except aosExc.LoginServerUnreachableError:
module.fail_json(
msg="AOS-server [%s] API not available/reachable, check server" % aos.server)
except aosExc.LoginAuthError:
module.fail_json(msg="AOS-server login credentials failed")
module.exit_json(changed=False,
ansible_facts=dict(aos_session=aos.session),
aos_session=dict(aos_session=aos.session))
def main():
module = AnsibleModule(
argument_spec=dict(
server=dict(required=True),
port=dict(default='443', type="int"),
user=dict(default='admin'),
passwd=dict(default='admin', no_log=True)))
if not HAS_AOS_PYEZ:
module.fail_json(msg='aos-pyez is not installed. Please see details '
'here: https://github.com/Apstra/aos-pyez')
# Check if aos-pyez is present and match the minimum version
check_aos_version(module, '0.6.1')
aos_login(module)
if __name__ == '__main__':
main()
| gpl-3.0 | -2,178,526,695,427,118,600 | 28.568345 | 89 | 0.66983 | false |
lmtim/iOSBlogCN | Export.py | 65 | 1482 | __author__ = 'wwxiang'
#coding=utf-8
import os
import re
work = os.getcwd()
resxml = work + os.path.sep + 'blogcn.opml'
workmd = work + os.path.sep + 'README.md'
def handler():
isblock = True
handlerData = []
lineNo = 0
try:
with open(workmd,'rb') as linefs:
            lineCount = len(linefs.readlines())
with open(workmd,'rb') as fs:
while isblock:
lineNo += 1
val = fs.readline().decode()
                if lineNo == lineCount:
isblock = False
                if not val or val[0] != '[':
continue
title = re.findall(r'\[(.+?)\]',val)[0]
xmlUrl = re.findall(r'<(.+?)>',val)[0]
htmlUrl = re.findall(r'\((.+?)\)',val)[0]
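                # Assumed README line format (hypothetical values):
                #   [Blog Title](http://example.com/) <http://example.com/feed.xml>
                # which yields title='Blog Title',
                # xmlUrl='http://example.com/feed.xml' and
                # htmlUrl='http://example.com/'.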
handlerData.append('<outline text="{0}" title="{0}" type="rss" xmlUrl="{1}" htmlUrl="{2}"/>'.format(title,xmlUrl,htmlUrl))
fs.close()
except:
        print('Error', 'failed to read file')
return
    export_xml = '<?xml version="1.0" encoding="UTF-8"?><opml version="1.0"><head><title>Exported subscriptions</title></head><body><outline text="ios" title="ios" >\n'
export_xml += '\r\n'.join(handlerData)
export_xml += '</outline></body></opml>\r\n'
with open(resxml,'wb') as fs:
fs.write(export_xml.encode())
fs.close()
    print('blogcn.opml file processing complete')
pass
if os.path.isfile(workmd):
handler()
| gpl-2.0 | -4,641,633,338,557,022,000 | 30.347826 | 150 | 0.507628 | false |
ddzialak/boto | boto/cloudsearch2/search.py | 16 | 13430 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from math import ceil
from boto.compat import json, map, six
import requests
SIMPLE = 'simple'
STRUCTURED = 'structured'
LUCENE = 'lucene'
DISMAX = 'dismax'
class SearchServiceException(Exception):
pass
class SearchResults(object):
def __init__(self, **attrs):
self.rid = attrs['status']['rid']
self.time_ms = attrs['status']['time-ms']
self.hits = attrs['hits']['found']
self.docs = attrs['hits']['hit']
self.start = attrs['hits']['start']
self.query = attrs['query']
self.search_service = attrs['search_service']
self.facets = {}
if 'facets' in attrs:
for (facet, values) in attrs['facets'].items():
if 'buckets' in values:
self.facets[facet] = dict((k, v) for (k, v) in map(lambda x: (x['value'], x['count']), values.get('buckets', [])))
self.num_pages_needed = ceil(self.hits / self.query.real_size)
def __len__(self):
return len(self.docs)
def __iter__(self):
return iter(self.docs)
def next_page(self):
"""Call Cloudsearch to get the next page of search results
:rtype: :class:`boto.cloudsearch2.search.SearchResults`
:return: the following page of search results
"""
if self.query.page <= self.num_pages_needed:
self.query.start += self.query.real_size
self.query.page += 1
return self.search_service(self.query)
else:
raise StopIteration
class Query(object):
RESULTS_PER_PAGE = 500
def __init__(self, q=None, parser=None, fq=None, expr=None,
return_fields=None, size=10, start=0, sort=None,
facet=None, highlight=None, partial=None, options=None):
self.q = q
self.parser = parser
self.fq = fq
self.expr = expr or {}
self.sort = sort or []
self.return_fields = return_fields or []
self.start = start
self.facet = facet or {}
self.highlight = highlight or {}
self.partial = partial
self.options = options
self.page = 0
self.update_size(size)
def update_size(self, new_size):
self.size = new_size
self.real_size = Query.RESULTS_PER_PAGE if (self.size >
Query.RESULTS_PER_PAGE or self.size == 0) else self.size
def to_params(self):
"""Transform search parameters from instance properties to a dictionary
:rtype: dict
:return: search parameters
"""
params = {'start': self.start, 'size': self.real_size}
if self.q:
params['q'] = self.q
if self.parser:
params['q.parser'] = self.parser
if self.fq:
params['fq'] = self.fq
if self.expr:
for k, v in six.iteritems(self.expr):
params['expr.%s' % k] = v
if self.facet:
for k, v in six.iteritems(self.facet):
if not isinstance(v, six.string_types):
v = json.dumps(v)
params['facet.%s' % k] = v
if self.highlight:
for k, v in six.iteritems(self.highlight):
params['highlight.%s' % k] = v
if self.options:
params['q.options'] = self.options
if self.return_fields:
params['return'] = ','.join(self.return_fields)
if self.partial is not None:
params['partial'] = self.partial
if self.sort:
params['sort'] = ','.join(self.sort)
return params
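    # Illustrative result, derived from the branches above:
    #   Query(q='tim', parser='simple', size=2).to_params()
    #   -> {'start': 0, 'size': 2, 'q': 'tim', 'q.parser': 'simple'}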
class SearchConnection(object):
def __init__(self, domain=None, endpoint=None):
self.domain = domain
self.endpoint = endpoint
self.session = requests.Session()
# Copy proxy settings from connection
if self.domain and self.domain.layer1 and self.domain.layer1.use_proxy:
self.session.proxies['http'] = self.domain.layer1.get_proxy_url_with_auth()
if not endpoint:
self.endpoint = domain.search_service_endpoint
def build_query(self, q=None, parser=None, fq=None, rank=None, return_fields=None,
size=10, start=0, facet=None, highlight=None, sort=None,
partial=None, options=None):
return Query(q=q, parser=parser, fq=fq, expr=rank, return_fields=return_fields,
size=size, start=start, facet=facet, highlight=highlight,
sort=sort, partial=partial, options=options)
def search(self, q=None, parser=None, fq=None, rank=None, return_fields=None,
size=10, start=0, facet=None, highlight=None, sort=None, partial=None,
options=None):
"""
Send a query to CloudSearch
        Each search query should use at least the q or fq argument to specify
the search parameter. The other options are used to specify the
criteria of the search.
:type q: string
:param q: A string to search the default search fields for.
:type parser: string
:param parser: The parser to use. 'simple', 'structured', 'lucene', 'dismax'
:type fq: string
:param fq: The filter query to use.
:type sort: List of strings
:param sort: A list of fields or rank expressions used to order the
search results. Order is handled by adding 'desc' or 'asc' after the field name.
``['year desc', 'author asc']``
:type return_fields: List of strings
:param return_fields: A list of fields which should be returned by the
search. If this field is not specified, only IDs will be returned.
``['headline']``
:type size: int
:param size: Number of search results to specify
:type start: int
:param start: Offset of the first search result to return (can be used
for paging)
:type facet: dict
:param facet: Dictionary of fields for which facets should be returned
            The facet value is a string of JSON options
``{'year': '{sort:"bucket", size:3}', 'genres': '{buckets:["Action","Adventure","Sci-Fi"]}'}``
:type highlight: dict
:param highlight: Dictionary of fields for which highlights should be returned
            The highlight value is a string of JSON options
``{'genres': '{format:'text',max_phrases:2,pre_tag:'<b>',post_tag:'</b>'}'}``
:type partial: bool
        :param partial: Should partial results from a partitioned service be returned if
one or more index partitions are unreachable.
:type options: str
:param options: Options for the query parser specified in *parser*.
Specified as a string in JSON format.
``{fields: ['title^5', 'description']}``
:rtype: :class:`boto.cloudsearch2.search.SearchResults`
:return: Returns the results of this search
The following examples all assume we have indexed a set of documents
with fields: *author*, *date*, *headline*
A simple search will look for documents whose default text search
fields will contain the search word exactly:
>>> search(q='Tim') # Return documents with the word Tim in them (but not Timothy)
A simple search with more keywords will return documents whose default
text search fields contain the search strings together or separately.
>>> search(q='Tim apple') # Will match "tim" and "apple"
More complex searches require the boolean search operator.
Wildcard searches can be used to search for any words that start with
the search string.
>>> search(q="'Tim*'") # Return documents with words like Tim or Timothy)
Search terms can also be combined. Allowed operators are "and", "or",
"not", "field", "optional", "token", "phrase", or "filter"
>>> search(q="(and 'Tim' (field author 'John Smith'))", parser='structured')
Facets allow you to show classification information about the search
results. For example, you can retrieve the authors who have written
about Tim with a max of 3
>>> search(q='Tim', facet={'Author': '{sort:"bucket", size:3}'})
"""
query = self.build_query(q=q, parser=parser, fq=fq, rank=rank,
return_fields=return_fields,
size=size, start=start, facet=facet,
highlight=highlight, sort=sort,
partial=partial, options=options)
return self(query)
def __call__(self, query):
"""Make a call to CloudSearch
:type query: :class:`boto.cloudsearch2.search.Query`
:param query: A group of search criteria
:rtype: :class:`boto.cloudsearch2.search.SearchResults`
:return: search results
"""
api_version = '2013-01-01'
if self.domain:
api_version = self.domain.layer1.APIVersion
url = "http://%s/%s/search" % (self.endpoint, api_version)
params = query.to_params()
r = self.session.get(url, params=params)
_body = r.content.decode('utf-8')
try:
data = json.loads(_body)
except ValueError:
if r.status_code == 403:
msg = ''
import re
g = re.search('<html><body><h1>403 Forbidden</h1>([^<]+)<', _body)
try:
msg = ': %s' % (g.groups()[0].strip())
except AttributeError:
pass
raise SearchServiceException('Authentication error from Amazon%s' % msg)
raise SearchServiceException("Got non-json response from Amazon. %s" % _body, query)
if 'messages' in data and 'error' in data:
for m in data['messages']:
if m['severity'] == 'fatal':
raise SearchServiceException("Error processing search %s "
"=> %s" % (params, m['message']), query)
elif 'error' in data:
raise SearchServiceException("Unknown error processing search %s"
% json.dumps(data), query)
data['query'] = query
data['search_service'] = self
return SearchResults(**data)
def get_all_paged(self, query, per_page):
"""Get a generator to iterate over all pages of search results
:type query: :class:`boto.cloudsearch2.search.Query`
:param query: A group of search criteria
:type per_page: int
:param per_page: Number of docs in each :class:`boto.cloudsearch2.search.SearchResults` object.
:rtype: generator
:return: Generator containing :class:`boto.cloudsearch2.search.SearchResults`
"""
query.update_size(per_page)
page = 0
num_pages_needed = 0
while page <= num_pages_needed:
results = self(query)
num_pages_needed = results.num_pages_needed
yield results
query.start += query.real_size
page += 1
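    # Sketch: page through results 100 docs at a time; `conn` and `query` are
    # assumed to be a SearchConnection and Query built elsewhere.
    #
    #   for page in conn.get_all_paged(query, per_page=100):
    #       for doc in page:
    #           handle(doc)  # `handle` is placeholder user code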
def get_all_hits(self, query):
"""Get a generator to iterate over all search results
Transparently handles the results paging from Cloudsearch
search results so even if you have many thousands of results
you can iterate over all results in a reasonably efficient
manner.
:type query: :class:`boto.cloudsearch2.search.Query`
:param query: A group of search criteria
:rtype: generator
:return: All docs matching query
"""
page = 0
num_pages_needed = 0
while page <= num_pages_needed:
results = self(query)
num_pages_needed = results.num_pages_needed
for doc in results:
yield doc
query.start += query.real_size
page += 1
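    # Sketch: stream every matching document without manual paging; same
    # assumptions as above.
    #
    #   for doc in conn.get_all_hits(query):
    #       handle(doc)  # `handle` is placeholder user code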
def get_num_hits(self, query):
"""Return the total number of hits for query
:type query: :class:`boto.cloudsearch2.search.Query`
:param query: a group of search criteria
:rtype: int
:return: Total number of hits for query
"""
query.update_size(1)
return self(query).hits
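# Sketch: a cheap existence check built on get_num_hits(); `conn` and
# `query` are assumed as above.
#
#   if conn.get_num_hits(query) == 0:
#       print('no matches')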
| mit | -2,765,473,993,947,969,000 | 35.594005 | 134 | 0.595905 | false |
Sylrob434/CouchPotatoServer | libs/CodernityDB/debug_stuff.py | 44 | 7678 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011-2013 Codernity (http://codernity.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from CodernityDB.tree_index import TreeBasedIndex
import struct
import os
import inspect
from functools import wraps
import json
class DebugTreeBasedIndex(TreeBasedIndex):
def __init__(self, *args, **kwargs):
super(DebugTreeBasedIndex, self).__init__(*args, **kwargs)
def print_tree(self):
print '-----CURRENT TREE-----'
print self.root_flag
if self.root_flag == 'l':
print '---ROOT---'
self._print_leaf_data(self.data_start)
return
else:
print '---ROOT---'
self._print_node_data(self.data_start)
nr_of_el, children_flag = self._read_node_nr_of_elements_and_children_flag(
self.data_start)
nodes = []
for index in range(nr_of_el):
l_pointer, key, r_pointer = self._read_single_node_key(
self.data_start, index)
nodes.append(l_pointer)
nodes.append(r_pointer)
print 'ROOT NODES', nodes
while children_flag == 'n':
self._print_level(nodes, 'n')
new_nodes = []
for node in nodes:
nr_of_el, children_flag = \
self._read_node_nr_of_elements_and_children_flag(node)
for index in range(nr_of_el):
l_pointer, key, r_pointer = self._read_single_node_key(
node, index)
new_nodes.append(l_pointer)
new_nodes.append(r_pointer)
nodes = new_nodes
self._print_level(nodes, 'l')
def _print_level(self, nodes, flag):
print '---NEXT LVL---'
if flag == 'n':
for node in nodes:
self._print_node_data(node)
elif flag == 'l':
for node in nodes:
self._print_leaf_data(node)
def _print_leaf_data(self, leaf_start_position):
print 'printing data of leaf at', leaf_start_position
nr_of_elements = self._read_leaf_nr_of_elements(leaf_start_position)
self.buckets.seek(leaf_start_position)
data = self.buckets.read(self.leaf_heading_size +
nr_of_elements * self.single_leaf_record_size)
leaf = struct.unpack('<' + self.leaf_heading_format +
nr_of_elements * self.single_leaf_record_format, data)
print leaf
print
def _print_node_data(self, node_start_position):
print 'printing data of node at', node_start_position
nr_of_elements = self._read_node_nr_of_elements_and_children_flag(
node_start_position)[0]
self.buckets.seek(node_start_position)
data = self.buckets.read(self.node_heading_size + self.pointer_size
+ nr_of_elements * (self.key_size + self.pointer_size))
node = struct.unpack('<' + self.node_heading_format + self.pointer_format
+ nr_of_elements * (
self.key_format + self.pointer_format),
data)
print node
print
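    # Debugging sketch (index path and name are hypothetical): use this class
    # in place of TreeBasedIndex for a custom index, then dump the B+tree:
    #
    #   idx = DebugTreeBasedIndex(db_path, 'id')
    #   ... insert some records ...
    #   idx.print_tree()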
# ------------------>
def database_step_by_step(db_obj, path=None):
if not path:
        # ugly, but builds the log path next to the database in a platform-independent way
p = db_obj.path
p1 = os.path.split(p)
p2 = os.path.split(p1[0])
p3 = '_'.join([p2[1], 'operation_logger.log'])
path = os.path.join(os.path.split(p2[0])[0], p3)
f_obj = open(path, 'wb')
    __stack = []  # inspect.stack() is unreliable under pytest etc., so track calls manually
    def remove_from_stack(name):
        # Drop the most recent occurrence of `name` from the recorded call stack.
        for i in range(len(__stack) - 1, -1, -1):
            if __stack[i] == name:
                __stack.pop(i)
                return
def __dumper(f):
@wraps(f)
def __inner(*args, **kwargs):
funct_name = f.__name__
if funct_name == 'count':
name = args[0].__name__
meth_args = (name,) + args[1:]
elif funct_name in ('reindex_index', 'compact_index'):
name = args[0].name
meth_args = (name,) + args[1:]
else:
meth_args = args
kwargs_copy = kwargs.copy()
res = None
__stack.append(funct_name)
if funct_name == 'insert':
try:
res = f(*args, **kwargs)
except:
packed = json.dumps((funct_name,
meth_args, kwargs_copy, None))
f_obj.write('%s\n' % packed)
f_obj.flush()
raise
else:
packed = json.dumps((funct_name,
meth_args, kwargs_copy, res))
f_obj.write('%s\n' % packed)
f_obj.flush()
else:
if funct_name == 'get':
for curr in __stack:
if ('delete' in curr or 'update' in curr) and not curr.startswith('test'):
remove_from_stack(funct_name)
return f(*args, **kwargs)
packed = json.dumps((funct_name, meth_args, kwargs_copy))
f_obj.write('%s\n' % packed)
f_obj.flush()
res = f(*args, **kwargs)
remove_from_stack(funct_name)
return res
return __inner
for meth_name, meth_f in inspect.getmembers(db_obj, predicate=inspect.ismethod):
if not meth_name.startswith('_'):
setattr(db_obj, meth_name, __dumper(meth_f))
setattr(db_obj, 'operation_logger', f_obj)
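# Record/replay sketch (the `db` objects and log path are hypothetical):
#
#   database_step_by_step(db, '/tmp/ops.log')      # wrap db methods, log calls
#   ... exercise the database ...
#   database_from_steps(fresh_db, '/tmp/ops.log')  # replay the log elsewhere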
def database_from_steps(db_obj, path):
# db_obj.insert=lambda data : insert_for_debug(db_obj, data)
with open(path, 'rb') as f_obj:
for current in f_obj:
line = json.loads(current[:-1])
if line[0] == 'count':
obj = getattr(db_obj, line[1][0])
line[1] = [obj] + line[1][1:]
name = line[0]
if name == 'insert':
try:
line[1][0].pop('_rev')
except:
pass
elif name in ('delete', 'update'):
el = db_obj.get('id', line[1][0]['_id'])
line[1][0]['_rev'] = el['_rev']
# print 'FROM STEPS doing', line
meth = getattr(db_obj, line[0], None)
if not meth:
raise Exception("Method = `%s` not found" % line[0])
meth(*line[1], **line[2])
# def insert_for_debug(self, data):
#
# _rev = data['_rev']
#
# if not '_id' in data:
# _id = uuid4().hex
# else:
# _id = data['_id']
# data['_id'] = _id
# try:
# _id = bytes(_id)
# except:
# raise DatabaseException("`_id` must be valid bytes object")
# self._insert_indexes(_id, _rev, data)
# ret = {'_id': _id, '_rev': _rev}
# data.update(ret)
# return ret
| gpl-3.0 | 1,710,699,627,838,807,000 | 35.388626 | 98 | 0.502084 | false |
cdepman/falcon_api | site-packages/wheel/test/test_install.py | 455 | 1866 | # Test wheel.
# The file has the following contents:
# hello.pyd
# hello/hello.py
# hello/__init__.py
# test-1.0.data/data/hello.dat
# test-1.0.data/headers/hello.dat
# test-1.0.data/scripts/hello.sh
# test-1.0.dist-info/WHEEL
# test-1.0.dist-info/METADATA
# test-1.0.dist-info/RECORD
# The root is PLATLIB
# So, some in PLATLIB, and one in each of DATA, HEADERS and SCRIPTS.
import wheel.tool
import wheel.pep425tags
from wheel.install import WheelFile
from tempfile import mkdtemp
import shutil
import os
THISDIR = os.path.dirname(__file__)
TESTWHEEL = os.path.join(THISDIR, 'test-1.0-py2.py3-none-win32.whl')
def check(*path):
return os.path.exists(os.path.join(*path))
def test_install():
tempdir = mkdtemp()
def get_supported():
return list(wheel.pep425tags.get_supported()) + [('py3', 'none', 'win32')]
whl = WheelFile(TESTWHEEL, context=get_supported)
assert whl.supports_current_python(get_supported)
try:
locs = {}
for key in ('purelib', 'platlib', 'scripts', 'headers', 'data'):
locs[key] = os.path.join(tempdir, key)
os.mkdir(locs[key])
whl.install(overrides=locs)
assert len(os.listdir(locs['purelib'])) == 0
assert check(locs['platlib'], 'hello.pyd')
assert check(locs['platlib'], 'hello', 'hello.py')
assert check(locs['platlib'], 'hello', '__init__.py')
assert check(locs['data'], 'hello.dat')
assert check(locs['headers'], 'hello.dat')
assert check(locs['scripts'], 'hello.sh')
assert check(locs['platlib'], 'test-1.0.dist-info', 'RECORD')
finally:
shutil.rmtree(tempdir)
def test_install_tool():
"""Slightly improve coverage of wheel.install"""
wheel.tool.install([TESTWHEEL], force=True, dry_run=True)
| mit | 4,381,787,573,899,417,000 | 31.963636 | 82 | 0.623258 | false |
bcornwellmott/erpnext | erpnext/hr/report/employee_birthday/employee_birthday.py | 120 | 1328 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
def execute(filters=None):
if not filters: filters = {}
columns = get_columns()
data = get_employees(filters)
return columns, data
def get_columns():
return [
_("Employee") + ":Link/Employee:120", _("Name") + ":Data:200", _("Date of Birth")+ ":Date:100",
_("Branch") + ":Link/Branch:120", _("Department") + ":Link/Department:120",
_("Designation") + ":Link/Designation:120", _("Gender") + "::60", _("Company") + ":Link/Company:120"
]
def get_employees(filters):
conditions = get_conditions(filters)
return frappe.db.sql("""select name, employee_name, date_of_birth,
branch, department, designation,
gender, company from tabEmployee where status = 'Active' %s""" % conditions, as_list=1)
def get_conditions(filters):
conditions = ""
if filters.get("month"):
month = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov",
"Dec"].index(filters["month"]) + 1
conditions += " and month(date_of_birth) = '%s'" % month
if filters.get("company"): conditions += " and company = '%s'" % \
filters["company"].replace("'", "\\'")
return conditions
| gpl-3.0 | 6,694,553,669,898,430,000 | 32.2 | 102 | 0.652108 | false |
notmyname/swift | test/unit/common/test_exceptions.py | 51 | 1959 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO(creiht): Tests
import unittest
from swift.common import exceptions
class TestExceptions(unittest.TestCase):
def test_replication_exception(self):
self.assertEqual(str(exceptions.ReplicationException()), '')
self.assertEqual(str(exceptions.ReplicationException('test')), 'test')
def test_replication_lock_timeout(self):
exc = exceptions.ReplicationLockTimeout(15, 'test')
try:
self.assertTrue(isinstance(exc, exceptions.MessageTimeout))
finally:
exc.cancel()
def test_client_exception(self):
strerror = 'test: HTTP://random:888/randompath?foo=1 666 reason: ' \
'device /sdb1 content'
exc = exceptions.ClientException('test', http_scheme='HTTP',
http_host='random',
http_port=888,
http_path='/randompath',
http_query='foo=1',
http_status=666,
http_reason='reason',
http_device='/sdb1',
http_response_content='content')
self.assertEqual(str(exc), strerror)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 80,773,942,721,669,070 | 38.18 | 78 | 0.58193 | false |
otherness-space/myProject002 | my_project_002/lib/python2.7/site-packages/django/contrib/contenttypes/management.py | 96 | 2903 | from django.contrib.contenttypes.models import ContentType
from django.db import DEFAULT_DB_ALIAS, router
from django.db.models import get_apps, get_models, signals
from django.utils.encoding import smart_text
from django.utils import six
from django.utils.six.moves import input
def update_contenttypes(app, created_models, verbosity=2, db=DEFAULT_DB_ALIAS, **kwargs):
"""
Creates content types for models in the given app, removing any model
entries that no longer have a matching model class.
"""
if not router.allow_syncdb(db, ContentType):
return
ContentType.objects.clear_cache()
app_models = get_models(app)
if not app_models:
return
# They all have the same app_label, get the first one.
app_label = app_models[0]._meta.app_label
app_models = dict(
(model._meta.object_name.lower(), model)
for model in app_models
)
# Get all the content types
content_types = dict(
(ct.model, ct)
for ct in ContentType.objects.using(db).filter(app_label=app_label)
)
to_remove = [
ct
for (model_name, ct) in six.iteritems(content_types)
if model_name not in app_models
]
cts = [
ContentType(
name=smart_text(model._meta.verbose_name_raw),
app_label=app_label,
model=model_name,
)
for (model_name, model) in six.iteritems(app_models)
if model_name not in content_types
]
ContentType.objects.using(db).bulk_create(cts)
if verbosity >= 2:
for ct in cts:
print("Adding content type '%s | %s'" % (ct.app_label, ct.model))
# Confirm that the content type is stale before deletion.
if to_remove:
if kwargs.get('interactive', False):
content_type_display = '\n'.join([
' %s | %s' % (ct.app_label, ct.model)
for ct in to_remove
])
ok_to_delete = input("""The following content types are stale and need to be deleted:
%s
Any objects related to these content types by a foreign key will also
be deleted. Are you sure you want to delete these content types?
If you're unsure, answer 'no'.
Type 'yes' to continue, or 'no' to cancel: """ % content_type_display)
else:
ok_to_delete = False
if ok_to_delete == 'yes':
for ct in to_remove:
if verbosity >= 2:
print("Deleting stale content type '%s | %s'" % (ct.app_label, ct.model))
ct.delete()
else:
if verbosity >= 2:
print("Stale content types remain.")
def update_all_contenttypes(verbosity=2, **kwargs):
for app in get_apps():
update_contenttypes(app, None, verbosity, **kwargs)
signals.post_syncdb.connect(update_contenttypes)
if __name__ == "__main__":
update_all_contenttypes()
| mit | -7,901,137,522,889,848,000 | 31.617978 | 97 | 0.612125 | false |
onceuponatimeforever/oh-mainline | vendor/packages/bleach/bleach/tests/test_basics.py | 21 | 5094 | import html5lib
from nose.tools import eq_
import bleach
def test_empty():
eq_('', bleach.clean(''))
def test_comments_only():
comment = '<!-- this is a comment -->'
open_comment = '<!-- this is an open comment'
eq_('', bleach.clean(comment))
eq_('', bleach.clean(open_comment))
eq_(comment, bleach.clean(comment, strip_comments=False))
eq_('%s-->' % open_comment, bleach.clean(open_comment,
strip_comments=False))
def test_with_comments():
html = '<!-- comment -->Just text'
eq_('Just text', bleach.clean(html))
eq_(html, bleach.clean(html, strip_comments=False))
def test_no_html():
eq_('no html string', bleach.clean('no html string'))
def test_allowed_html():
eq_('an <strong>allowed</strong> tag',
bleach.clean('an <strong>allowed</strong> tag'))
eq_('another <em>good</em> tag',
bleach.clean('another <em>good</em> tag'))
def test_bad_html():
eq_('a <em>fixed tag</em>',
bleach.clean('a <em>fixed tag'))
def test_function_arguments():
TAGS = ['span', 'br']
ATTRS = {'span': ['style']}
eq_('a <br><span style="">test</span>',
bleach.clean('a <br/><span style="color:red">test</span>',
tags=TAGS, attributes=ATTRS))
def test_named_arguments():
ATTRS = {'a': ['rel', 'href']}
s = u'<a href="http://xx.com" rel="alternate">xx.com</a>'
eq_('<a href="http://xx.com">xx.com</a>', bleach.clean(s))
eq_(s, bleach.clean(s, attributes=ATTRS))
def test_disallowed_html():
eq_('a <script>safe()</script> test',
bleach.clean('a <script>safe()</script> test'))
eq_('a <style>body{}</style> test',
bleach.clean('a <style>body{}</style> test'))
def test_bad_href():
eq_('<em>no link</em>',
bleach.clean('<em href="fail">no link</em>'))
def test_bare_entities():
eq_('an & entity', bleach.clean('an & entity'))
eq_('an < entity', bleach.clean('an < entity'))
eq_('tag < <em>and</em> entity',
bleach.clean('tag < <em>and</em> entity'))
eq_('&', bleach.clean('&'))
def test_escaped_entities():
s = u'<em>strong</em>'
eq_(s, bleach.clean(s))
def test_serializer():
s = u'<table></table>'
eq_(s, bleach.clean(s, tags=['table']))
eq_(u'test<table></table>', bleach.linkify(u'<table>test</table>'))
eq_(u'<p>test</p>', bleach.clean(u'<p>test</p>', tags=['p']))
def test_no_href_links():
s = u'<a name="anchor">x</a>'
eq_(s, bleach.linkify(s))
eq_(s, bleach.linkify(s, nofollow=False))
def test_weird_strings():
s = '</3'
eq_(bleach.clean(s), '')
def test_xml_render():
parser = html5lib.HTMLParser()
eq_(bleach._render(parser.parseFragment('')), '')
def test_stripping():
eq_('a test <em>with</em> <b>html</b> tags',
bleach.clean('a test <em>with</em> <b>html</b> tags', strip=True))
eq_('a test <em>with</em> <b>html</b> tags',
bleach.clean('a test <em>with</em> <img src="http://example.com/"> '
'<b>html</b> tags', strip=True))
s = '<p><a href="http://example.com/">link text</a></p>'
eq_('<p>link text</p>', bleach.clean(s, tags=['p'], strip=True))
s = '<p><span>multiply <span>nested <span>text</span></span></span></p>'
eq_('<p>multiply nested text</p>', bleach.clean(s, tags=['p'], strip=True))
s = ('<p><a href="http://example.com/"><img src="http://example.com/">'
'</a></p>')
eq_('<p><a href="http://example.com/"></a></p>',
bleach.clean(s, tags=['p', 'a'], strip=True))
def test_allowed_styles():
ATTR = ['style']
STYLE = ['color']
blank = '<b style=""></b>'
s = '<b style="color: blue;"></b>'
eq_(blank, bleach.clean('<b style="top:0"></b>', attributes=ATTR))
eq_(s, bleach.clean(s, attributes=ATTR, styles=STYLE))
eq_(s, bleach.clean('<b style="top: 0; color: blue;"></b>',
attributes=ATTR, styles=STYLE))
def test_idempotent():
"""Make sure that applying the filter twice doesn't change anything."""
dirty = u'<span>invalid & </span> < extra http://link.com<em>'
clean = bleach.clean(dirty)
eq_(clean, bleach.clean(clean))
linked = bleach.linkify(dirty)
eq_(linked, bleach.linkify(linked))
def test_lowercase_html():
"""We should output lowercase HTML."""
dirty = u'<EM CLASS="FOO">BAR</EM>'
clean = u'<em class="FOO">BAR</em>'
eq_(clean, bleach.clean(dirty, attributes=['class']))
def test_wildcard_attributes():
ATTR = {
'*': ['id'],
'img': ['src'],
}
TAG = ['img', 'em']
dirty = (u'both <em id="foo" style="color: black">can</em> have '
u'<img id="bar" src="foo"/>')
clean = u'both <em id="foo">can</em> have <img id="bar" src="foo">'
eq_(clean, bleach.clean(dirty, tags=TAG, attributes=ATTR))
def test_sarcasm():
"""Jokes should crash.<sarcasm/>"""
dirty = u'Yeah right <sarcasm/>'
clean = u'Yeah right <sarcasm/>'
eq_(clean, bleach.clean(dirty))
| agpl-3.0 | 8,802,816,271,392,228,000 | 28.964706 | 79 | 0.56223 | false |
blueyed/ycmd | ycmd/request_validation.py | 29 | 2422 | #!/usr/bin/env python
#
# Copyright (C) 2014 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from ycmd.responses import ServerError
# Throws an exception if request doesn't have all the required fields.
# TODO: Accept a request_type param so that we can also verify missing
# command_arguments and completer_target fields if necessary.
def EnsureRequestValid( request_json ):
required_fields = set(
[ 'line_num', 'column_num', 'filepath', 'file_data' ] )
missing = set( x for x in required_fields if x not in request_json )
if 'filepath' not in missing and 'file_data' not in missing:
missing.update( _MissingFieldsForFileData( request_json ) )
if not missing:
return True
message = '\n'.join( _FieldMissingMessage( field ) for field in missing )
raise ServerError( message )
def _FieldMissingMessage( field ):
return 'Request missing required field: {0}'.format( field )
def _FilepathInFileDataSpec( request_json ):
return 'file_data["{0}"]'.format( request_json[ 'filepath' ] )
def _SingleFileDataFieldSpec( request_json, field ):
return '{0}["{1}"]'.format( _FilepathInFileDataSpec( request_json ), field )
def _MissingFieldsForFileData( request_json ):
missing = set()
data_for_file = request_json[ 'file_data' ].get( request_json[ 'filepath' ] )
if data_for_file:
required_data = [ 'filetypes', 'contents' ]
for required in required_data:
if required not in data_for_file:
missing.add( _SingleFileDataFieldSpec( request_json, required ) )
filetypes = data_for_file.get( 'filetypes', [] )
if not filetypes:
missing.add( '{0}[0]'.format(
_SingleFileDataFieldSpec( request_json, 'filetypes' ) ) )
else:
missing.add( _FilepathInFileDataSpec( request_json ) )
return missing
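# Minimal self-check sketch (not part of ycmd); the request below is
# hypothetical example data shaped to satisfy the checks above.
if __name__ == '__main__':
  request = {
    'line_num': 1,
    'column_num': 1,
    'filepath': '/tmp/example.py',
    'file_data': {
      '/tmp/example.py': {
        'filetypes': [ 'python' ],
        'contents': 'import os\n',
      },
    },
  }
  print( EnsureRequestValid( request ) )  # True when nothing is missing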
| gpl-3.0 | -664,107,804,409,604,200 | 36.84375 | 79 | 0.714699 | false |
nolanliou/tensorflow | tensorflow/python/kernel_tests/sparse_conditional_accumulator_test.py | 132 | 22955 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.platform import test
def _indexedslice(x, noshape=False):
x = np.array(x)
dense_shape = x.shape
ndim = len(dense_shape)
indices = np.where(np.sum(x, tuple(range(1, ndim))))[0]
values = x[indices]
if noshape:
dense_shape = None
return ops.IndexedSlices(
indices=indices.tolist(), values=values, dense_shape=dense_shape)
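# Worked example (sketch): for x = [[0, 0], [3, 0]] only row 1 has a non-zero
# sum, so _indexedslice(x) returns IndexedSlices with indices=[1],
# values=[[3, 0]] and dense_shape=(2, 2).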
class IndexedSlicesConditionalAccumulatorTest(test.TestCase):
def _assertEqual_indexedslices(self, expected_tensor, result):
self.assertAllEqual(expected_tensor.indices, result.indices)
self.assertAllEqual(expected_tensor.values, result.values)
if (result.dense_shape is not None and
expected_tensor.dense_shape is not None):
self.assertAllEqual(expected_tensor.dense_shape, result.dense_shape)
def _assertEqual_nparray(self, expected_array, result, sess):
expected_tensor = _indexedslice(expected_array)
self._assertEqual_indexedslices(expected_tensor, result)
def testConstructor(self):
with ops.Graph().as_default():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q")
self.assertTrue(isinstance(q.accumulator_ref, ops.Tensor))
self.assertProtoEquals("""
name:'Q' op:'SparseConditionalAccumulator'
attr { key: 'dtype' value { type: DT_FLOAT } }
attr { key: 'shape' value { shape { unknown_rank: true} } }
attr { key: 'container' value { s: '' } }
attr { key: 'shared_name' value { s: '' } }
""", q.accumulator_ref.op.node_def)
def testConstructorWithShape(self):
with ops.Graph().as_default():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32,
name="Q",
shape=tensor_shape.TensorShape([1, 5, 2, 8]))
self.assertTrue(isinstance(q.accumulator_ref, ops.Tensor))
self.assertProtoEquals("""
name:'Q' op:'SparseConditionalAccumulator'
attr { key: 'dtype' value { type: DT_FLOAT } }
attr { key: 'shape' value { shape { dim {size: 1 }
dim {size: 5 }
dim {size: 2 }
dim {size: 8 }
} } }
attr { key: 'container' value { s: '' } }
attr { key: 'shared_name' value { s: '' } }
""", q.accumulator_ref.op.node_def)
def testAccumulatorSizeEmpty(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q")
self.assertEqual(q.num_accumulated().eval(), 0)
def testAccumulatorSetGlobalStep(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([1]))
set_global_step_op = q.set_global_step(1)
set_global_step_op.run()
def testAccumulatorApplyGradFloat32(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
accum_op = q.apply_indexed_slices_grad(
ops.IndexedSlices(
indices=[0, 2],
values=np.array([[0, 0, 1], [3, 0, 4]]).astype(np.float32)))
accum_op.run()
self.assertEqual(q.num_accumulated().eval(), 1)
def testDtypes(self):
with self.test_session() as sess:
dtypes = [dtypes_lib.float16, dtypes_lib.float32, dtypes_lib.float64]
for i in range(len(dtypes)):
dtype = dtypes[i]
q = data_flow_ops.SparseConditionalAccumulator(
dtype, shape=tensor_shape.TensorShape([3, 3, 3]))
elems = np.arange(2)
sum_elems = np.zeros([3, 3, 3]).astype(dtype.as_numpy_dtype)
for e in elems:
mat_to_add = np.zeros([3, 3, 3]).astype(dtype.as_numpy_dtype)
mat_to_add[i, i, i] = e + 1
sum_elems += mat_to_add
t = _indexedslice(mat_to_add)
q.apply_indexed_slices_grad(t).run()
result = sess.run(q.take_indexed_slices_grad(1))
self._assertEqual_nparray(sum_elems / len(elems), result, sess)
def testAccumulatorMultipleAccumulators(self):
with self.test_session() as sess:
q_f32_0 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
q_f32_1 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
q_f16_0 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float16, name="Q", shape=tensor_shape.TensorShape([2, 2]))
q_f16_1 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float16, name="Q", shape=tensor_shape.TensorShape([2, 2]))
accums = [q_f16_0, q_f16_1, q_f32_0, q_f32_1]
elems = [[[1, 0], [0, 0]], [[0, 1], [0, 0]], [[0, 0], [1, 0]], [[0, 0],
[0, 1]]]
expected_tensors = []
for i in range(len(accums)):
tensor_to_add = np.array(elems[i]).astype(accums[i]
.dtype.as_numpy_dtype)
expected_tensor = _indexedslice(tensor_to_add)
expected_tensors.append(expected_tensor)
st = _indexedslice(tensor_to_add)
accums[i].apply_indexed_slices_grad(st).run()
for i in range(len(accums)):
result = sess.run(accums[i].take_indexed_slices_grad(1))
self._assertEqual_indexedslices(expected_tensors[i], result)
def testAccumulatorTakeGrad(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=())
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1], values=np.array([[1, 0], [0, 2]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices)
accum_op.run()
accum_op = q.apply_grad([0, 2],
np.array([[0, 1], [3, 0]]).astype(np.float32),
[3, 2])
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = sess.run(takeg_t)
self.assertAllEqual(val.indices, [0, 1, 2])
self.assertAllEqual(val.values, [[0.5, 0.5], [0, 2], [3, 0]])
self.assertAllEqual(val.dense_shape, [-1, 2])
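      # Note: averaging on take is per row -- index 0 was written by both
      # apply ops, so its sum [1, 1] halves to [0.5, 0.5], while rows 1 and 2
      # were each written once and keep their values.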
def testAccumulatorRepeatedTakeGrad(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=())
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1], values=np.array([[1, 0], [0, 2]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices, local_step=0)
accum_op.run()
accum_op = q.apply_grad(
[0, 2],
np.array([[0, 1], [3, 0]]).astype(np.float32), [3, 2],
local_step=0)
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = sess.run(takeg_t)
self.assertAllEqual(val.indices, [0, 1, 2])
self.assertAllEqual(val.values, [[0.5, 0.5], [0, 2], [3, 0]])
self.assertAllEqual(val.dense_shape, [-1, 2])
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1],
values=np.array([[10, 0], [0, 20]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices, local_step=1)
accum_op.run()
accum_op = q.apply_grad(
[0, 2],
np.array([[0, 10], [30, 0]]).astype(np.float32), [3, 2],
local_step=1)
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = sess.run(takeg_t)
self.assertAllEqual(val.indices, [0, 1, 2])
self.assertAllEqual(val.values, [[5, 5], [0, 20], [30, 0]])
self.assertAllEqual(val.dense_shape, [-1, 2])
def testParallelApplyGrad(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
accum_ops = []
for x in elems:
x = _indexedslice(np.array([[x, 0], [0, x]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(x, local_step=0))
takeg_t = q.take_indexed_slices_grad(1)
def apply_indexed_slices_grad(accum_op):
sess.run(accum_op)
threads = [
self.checkedThread(
target=apply_indexed_slices_grad, args=(o,)) for o in accum_ops
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
val = sess.run(takeg_t)
expected_val = sum(elems) / len(elems)
self._assertEqual_nparray(
np.array([[expected_val, 0], [0, expected_val]]).astype(np.float32),
val, sess)
def testParallelTakeGrad(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
elems = [e + 1 for e in range(10)]
accum_ops = []
for e in elems:
v = _indexedslice(np.array([[0, 0], [e, 0]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(v, local_step=e - 1))
takeg_t = q.take_indexed_slices_grad(1)
results = []
def apply_indexed_slices_grad():
for accum_op in accum_ops:
time.sleep(1.0)
sess.run(accum_op)
apply_indexed_slices_grad_thread = self.checkedThread(
target=apply_indexed_slices_grad)
def take_grad():
t = sess.run(takeg_t)
results.append(t)
threads = [self.checkedThread(target=take_grad) for _ in range(10)]
for thread in threads:
thread.start()
apply_indexed_slices_grad_thread.start()
for thread in threads:
thread.join()
apply_indexed_slices_grad_thread.join()
for i in range(len(accum_ops)):
self._assertEqual_nparray(
np.array([[0, 0], [elems[i], 0]]), results[i], sess)
def testAccumulatorApplyAndBlockingTake(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
elems = [10.0, 20.0, 30.0]
elems_ave = sum(elems) / len(elems)
accum_ops = []
for x in elems:
x = _indexedslice(np.array([[0, x], [0, 0]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(x, local_step=0))
takeg_t = q.take_indexed_slices_grad(3)
results = []
def apply_indexed_slices_grad():
for accum_op in accum_ops:
sess.run(accum_op)
def take_grad():
results.append(sess.run(takeg_t))
accum_thread = self.checkedThread(target=apply_indexed_slices_grad)
takeg_thread = self.checkedThread(target=take_grad)
accum_thread.start()
takeg_thread.start()
accum_thread.join()
takeg_thread.join()
self._assertEqual_nparray([[0, elems_ave], [0, 0]], results[0], sess)
def _blocking_takeg(self, sess, takeg_op):
with self.assertRaisesOpError("was cancelled"):
sess.run(takeg_op)
def testAccumulatorCancel(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32,
name="Q",
shape=tensor_shape.TensorShape([1, 2, 3]))
takeg_t = q.take_indexed_slices_grad(1)
takeg_thread = self.checkedThread(
self._blocking_takeg, args=(sess, takeg_t))
takeg_thread.start()
time.sleep(1.0)
sess.close() # Will cancel blocked operation
takeg_thread.join()
def testNonVectorIndices(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Input indices should be vector but received shape:"):
q.apply_grad(
grad_indices=[[0, 1], [1, 0]],
grad_values=np.array([1, 2]).astype(np.float32)).run()
def testZeroDimensionValues(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Values cannot be 0-dimensional."):
q.apply_grad(
grad_indices=[0], grad_values=np.array(1).astype(np.float32)).run()
def testWrongNonEmptyInputValues(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
" non-empty input values, got "):
q.apply_grad(
grad_indices=[0, 1],
grad_values=np.array([[0, 1, 1]]).astype(np.float32)).run()
def testDynamicNonVectorIndices(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
x_indices = array_ops.placeholder(dtypes_lib.int64)
x_values = array_ops.placeholder(dtypes_lib.float32)
accum_op = q.apply_grad(grad_indices=x_indices, grad_values=x_values)
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Input indices should be vector but received shape:"):
sess.run(accum_op,
feed_dict={
x_indices: [[0, 1], [1, 0]],
x_values: np.array([1, 2]).astype(np.float32)
})
def testDynamicWrongNonEmptyInputValues(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
x_indices = array_ops.placeholder(dtypes_lib.int64)
x_values = array_ops.placeholder(dtypes_lib.float32)
accum_op = q.apply_grad(grad_indices=x_indices, grad_values=x_values)
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
" non-empty input values, got "):
sess.run(accum_op,
feed_dict={
x_indices: [0, 1],
x_values: np.array([[0, 1, 1]]).astype(np.float32)
})
def testEmptyShapeApply(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([]))
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Input indices should be vector"):
q.apply_grad(grad_indices=0, grad_values=[1.0], grad_shape=[]).run()
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Input indices should be vector"):
q.apply_grad(grad_indices=0, grad_values=[1.0]).run()
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Values cannot be 0-dimensional."):
q.apply_grad(grad_indices=[0], grad_values=1.0, grad_shape=[]).run()
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Values cannot be 0-dimensional."):
q.apply_grad(grad_indices=[0], grad_values=1.0).run()
# The right way to apply a scalar
q.apply_grad(grad_indices=[0], grad_values=[1.0], grad_shape=[]).run()
q.apply_grad(grad_indices=[0], grad_values=[1.0]).run()
def testValidateShape(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=[2, 2, None])
# Provided shape has wrong rank
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected shape rank at least 3, got 2"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array([[1, 2]]).astype(np.float32),
grad_shape=[2, 2]).run()
# Provided shape has wrong dim
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected shape dim 1 to be 2, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array([[[1, 2], [3, 4], [5, 6]]]).astype(np.float32),
grad_shape=[2, 3, 2]).run()
# Indices exceeded accumulator's shape's limits
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: index of slice 0 exceeded limits of shape;"
" index is 3 exceeded 2"):
q.apply_grad(
grad_indices=[3],
grad_values=np.array([[[1, 2], [3, 4]]]).astype(np.float32)).run()
# Values' rank does not match shape
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values rank at least 3, got 2"):
q.apply_grad(
grad_indices=[0, 1],
grad_values=np.array([[1, 2], [3, 4]]).astype(np.float32)).run()
# Values' dim does not match shape
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values dim 1 to be 2, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[1, 2], [3, 4], [5, 6]]]).astype(np.float32)).run()
# First successful gradient creates additional constraints
      # Shape will additionally be constrained to [None,2,2,2] hereafter.
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]).astype(np.float32)).run()
# Values' rank does not match accumulated gradient
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values rank 4, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array([[[1, 2], [3, 4]]]).astype(np.float32)).run()
# Values' dim does not match accumulated gradient
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values dim 3 to be 2, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]).astype(
np.float32)).run()
# After take grad, constraints on accumulated gradient are removed
sess.run(q.take_grad(1))
# First successful gradient imposes new constraints.
      # Hereafter, shape will additionally be constrained to [None,2,2,3].
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]).astype(
np.float32),
local_step=1).run()
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values dim 3 to be 3, got 2"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]).astype(np.float32),
local_step=1).run()
def testReturnShape(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=[2, None])
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]).astype(np.float32)).run()
val = sess.run(q.take_indexed_slices_grad(1))
self.assertAllEqual(val.dense_shape, [2, 2, 2, 2])
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=[None, 2])
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]).astype(
np.float32)).run()
val = sess.run(q.take_indexed_slices_grad(1))
self.assertAllEqual(val.dense_shape, [-1, 2, 2, 3])
  def testApplyGradInt32IndicesAndShape(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
accum_op = q.apply_grad(
grad_indices=constant_op.constant(
[0, 2], dtype=dtypes_lib.int32),
grad_values=constant_op.constant(
[[0, 0, 1], [3, 0, 4]], dtype=dtypes_lib.float32),
grad_shape=constant_op.constant(
[3, 3], dtype=dtypes_lib.int32))
accum_op.run()
accum_op = q.apply_indexed_slices_grad(
ops.IndexedSlices(
indices=constant_op.constant(
[0, 2], dtype=dtypes_lib.int32),
values=constant_op.constant(
[[0, 0, 1], [3, 0, 4]], dtype=dtypes_lib.float32),
dense_shape=constant_op.constant(
[3, 3], dtype=dtypes_lib.int32)))
accum_op.run()
self.assertEqual(q.num_accumulated().eval(), 2)
val = sess.run(q.take_indexed_slices_grad(1))
self.assertAllEqual(val.indices, [0, 2])
self.assertAllEqual(val.values, [[0, 0, 1], [3, 0, 4]])
self.assertAllEqual(val.dense_shape, [3, 3])
if __name__ == "__main__":
test.main()
| apache-2.0 | 8,486,528,713,473,834,000 | 37.644781 | 80 | 0.594947 | false |
bitfinder/thrift | contrib/fb303/py/fb303/FacebookBase.py | 173 | 1917 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time
import FacebookService
import thrift.reflection.limited
from ttypes import fb_status
class FacebookBase(FacebookService.Iface):
def __init__(self, name):
self.name = name
self.alive = int(time.time())
self.counters = {}
def getName(self, ):
return self.name
def getVersion(self, ):
return ''
def getStatus(self, ):
return fb_status.ALIVE
def getCounters(self):
return self.counters
def resetCounter(self, key):
self.counters[key] = 0
def getCounter(self, key):
if self.counters.has_key(key):
return self.counters[key]
return 0
def incrementCounter(self, key):
self.counters[key] = self.getCounter(key) + 1
def setOption(self, key, value):
pass
def getOption(self, key):
return ""
  def getOptions(self):
    return {}
def aliveSince(self):
return self.alive
def getCpuProfile(self, duration):
return ""
def getLimitedReflection(self):
return thrift.reflection.limited.Service()
def reinitialize(self):
pass
def shutdown(self):
pass
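# Usage sketch (service wiring omitted): handlers subclass FacebookBase and
# inherit counters and status reporting.
#
#   handler = FacebookBase('my-service')  # the name is illustrative
#   handler.incrementCounter('requests')
#   assert handler.getCounter('requests') == 1
#   assert handler.getStatus() == fb_status.ALIVE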
| apache-2.0 | -6,590,703,830,105,765,000 | 22.378049 | 61 | 0.710485 | false |
tjsavage/full_nonrel_starter | django/templatetags/static.py | 233 | 2149 | from django import template
from django.utils.encoding import iri_to_uri
register = template.Library()
class PrefixNode(template.Node):
def __repr__(self):
return "<PrefixNode for %r>" % self.name
def __init__(self, varname=None, name=None):
if name is None:
raise template.TemplateSyntaxError(
"Prefix nodes must be given a name to return.")
self.varname = varname
self.name = name
@classmethod
def handle_token(cls, parser, token, name):
"""
Class method to parse prefix node and return a Node.
"""
tokens = token.contents.split()
if len(tokens) > 1 and tokens[1] != 'as':
raise template.TemplateSyntaxError(
"First argument in '%s' must be 'as'" % tokens[0])
if len(tokens) > 1:
varname = tokens[2]
else:
varname = None
return cls(varname, name)
@classmethod
def handle_simple(cls, name):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
def render(self, context):
prefix = self.handle_simple(self.name)
if self.varname is None:
return prefix
context[self.varname] = prefix
return ''
@register.tag
def get_static_prefix(parser, token):
"""
Populates a template variable with the static prefix,
``settings.STATIC_URL``.
Usage::
{% get_static_prefix [as varname] %}
Examples::
{% get_static_prefix %}
{% get_static_prefix as static_prefix %}
"""
return PrefixNode.handle_token(parser, token, "STATIC_URL")
@register.tag
def get_media_prefix(parser, token):
"""
Populates a template variable with the static prefix,
``settings.MEDIA_URL``.
Usage::
{% get_media_prefix [as varname] %}
Examples::
{% get_media_prefix %}
{% get_media_prefix as media_prefix %}
"""
return PrefixNode.handle_token(parser, token, "MEDIA_URL")
| bsd-3-clause | 1,365,877,122,325,783,800 | 24.583333 | 66 | 0.579805 | false |
michhar/flask-webapp-aml | env/Lib/site-packages/wtforms/i18n.py | 142 | 2175 | import os
def messages_path():
"""
Determine the path to the 'messages' directory as best possible.
"""
module_path = os.path.abspath(__file__)
locale_path = os.path.join(os.path.dirname(module_path), 'locale')
if not os.path.exists(locale_path):
locale_path = '/usr/share/locale'
return locale_path
def get_builtin_gnu_translations(languages=None):
"""
Get a gettext.GNUTranslations object pointing at the
included translation files.
:param languages:
A list of languages to try, in order. If omitted or None, then
gettext will try to use locale information from the environment.
"""
import gettext
return gettext.translation('wtforms', messages_path(), languages)
def get_translations(languages=None, getter=get_builtin_gnu_translations):
"""
Get a WTForms translation object which wraps a low-level translations object.
:param languages:
A sequence of languages to try, in order.
:param getter:
A single-argument callable which returns a low-level translations object.
"""
translations = getter(languages)
if hasattr(translations, 'ugettext'):
return DefaultTranslations(translations)
else:
# Python 3 has no ugettext/ungettext, so just return the translations object.
return translations
class DefaultTranslations(object):
"""
A WTForms translations object to wrap translations objects which use
ugettext/ungettext.
"""
def __init__(self, translations):
self.translations = translations
def gettext(self, string):
return self.translations.ugettext(string)
def ngettext(self, singular, plural, n):
return self.translations.ungettext(singular, plural, n)
class DummyTranslations(object):
"""
A translations object which simply returns unmodified strings.
This is typically used when translations are disabled or if no valid
translations provider can be found.
"""
def gettext(self, string):
return string
def ngettext(self, singular, plural, n):
if n == 1:
return singular
return plural
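# Fallback sketch: try the bundled catalogs first and degrade to the dummy
# translations when none are installed (the language list is illustrative).
#
#   try:
#       trans = get_translations(['de_DE', 'en'])
#   except IOError:
#       trans = DummyTranslations()
#   label = trans.gettext('Invalid input.')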
| mit | 7,357,021,100,776,778,000 | 28 | 85 | 0.68092 | false |
xakepru/x14.11-coding-hyperv | src/hyperv4/distorm/examples/tests/test_distorm3.py | 4 | 68909 | #
# Gil Dabah 2006, http://ragestorm.net/distorm
# Tests for diStorm3
#
import os
import distorm3
from distorm3 import *
import struct
import unittest
import random
REG_NONE = 255
_REGISTERS = ["RAX", "RCX", "RDX", "RBX", "RSP", "RBP", "RSI", "RDI", "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
"EAX", "ECX", "EDX", "EBX", "ESP", "EBP", "ESI", "EDI", "R8D", "R9D", "R10D", "R11D", "R12D", "R13D", "R14D", "R15D",
"AX", "CX", "DX", "BX", "SP", "BP", "SI", "DI", "R8W", "R9W", "R10W", "R11W", "R12W", "R13W", "R14W", "R15W",
"AL", "CL", "DL", "BL", "AH", "CH", "DH", "BH", "R8B", "R9B", "R10B", "R11B", "R12B", "R13B", "R14B", "R15B",
"SPL", "BPL", "SIL", "DIL",
"ES", "CS", "SS", "DS", "FS", "GS",
"RIP",
"ST0", "ST1", "ST2", "ST3", "ST4", "ST5", "ST6", "ST7",
"MM0", "MM1", "MM2", "MM3", "MM4", "MM5", "MM6", "MM7",
"XMM0", "XMM1", "XMM2", "XMM3", "XMM4", "XMM5", "XMM6", "XMM7", "XMM8", "XMM9", "XMM10", "XMM11", "XMM12", "XMM13", "XMM14", "XMM15",
"YMM0", "YMM1", "YMM2", "YMM3", "YMM4", "YMM5", "YMM6", "YMM7", "YMM8", "YMM9", "YMM10", "YMM11", "YMM12", "YMM13", "YMM14", "YMM15",
"CR0", "", "CR2", "CR3", "CR4", "", "", "", "CR8",
"DR0", "DR1", "DR2", "DR3", "", "", "DR6", "DR7"]
class Registers(object):
def __init__(self):
for i in enumerate(_REGISTERS):
if len(i[1]):
setattr(self, i[1], i[0])
Regs = Registers()
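# Example: register indices are now addressable by name and follow the
# _REGISTERS ordering, e.g. Regs.RAX == 0 and Regs.EAX == 16.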
fbin = []
def Assemble(text, mode):
lines = text.replace("\n", "\r\n")
if mode is None:
mode = 32
lines = ("bits %d\r\n" % mode) + lines
open("1.asm", "wb").write(lines)
if mode == 64:
mode = "amd64"
else:
mode = "x86"
os.system("c:\\yasm.exe -m%s 1.asm" % mode)
return open("1", "rb").read()
class InstBin(unittest.TestCase):
def __init__(self, bin, mode):
bin = bin.decode("hex")
#fbin[mode].write(bin)
self.insts = Decompose(0, bin, mode)
self.inst = self.insts[0]
def check_valid(self, instsNo = 1):
self.assertNotEqual(self.inst.rawFlags, 65535)
self.assertEqual(len(self.insts), instsNo)
def check_invalid(self):
self.assertEqual(self.inst.rawFlags, 65535)
def check_mnemonic(self, mnemonic, instNo = 0):
self.assertNotEqual(self.inst.rawFlags, 65535)
self.assertEqual(self.insts[instNo].mnemonic, mnemonic)
class Inst(unittest.TestCase):
def __init__(self, instText, mode, instNo, features):
modeSize = [16, 32, 64][mode]
bin = Assemble(instText, modeSize)
#print map(lambda x: hex(ord(x)), bin)
#fbin[mode].write(bin)
self.insts = Decompose(0, bin, mode)
self.inst = self.insts[instNo]
def check_mnemonic(self, mnemonic):
self.assertEqual(self.inst.mnemonic, mnemonic)
def check_imm(self, n, val, sz):
self.assertEqual(self.inst.operands[n].type, distorm3.OPERAND_IMMEDIATE)
self.assertEqual(self.inst.operands[n].size, sz)
self.assertEqual(self.inst.operands[n].value, val)
def check_reg(self, n, idx, sz):
self.assertEqual(self.inst.operands[n].type, distorm3.OPERAND_REGISTER)
self.assertEqual(self.inst.operands[n].size, sz)
self.assertEqual(self.inst.operands[n].index, idx)
def check_pc(self, val, sz):
self.assertEqual(self.inst.operands[0].type, distorm3.OPERAND_IMMEDIATE)
self.assertEqual(self.inst.operands[0].size, sz)
self.assertEqual(self.inst.operands[0].value, val)
def check_disp(self, n, val, dispSize, derefSize):
self.assertEqual(self.inst.operands[n].type, distorm3.OPERAND_MEMORY)
self.assertEqual(self.inst.operands[n].dispSize, dispSize)
self.assertEqual(self.inst.operands[n].size, derefSize)
self.assertEqual(self.inst.operands[n].disp, val)
def check_abs_disp(self, n, val, dispSize, derefSize):
self.assertEqual(self.inst.operands[n].type, distorm3.OPERAND_ABSOLUTE_ADDRESS)
self.assertEqual(self.inst.operands[n].dispSize, dispSize)
self.assertEqual(self.inst.operands[n].size, derefSize)
self.assertEqual(self.inst.operands[n].disp, val)
def check_simple_deref(self, n, idx, derefSize):
""" Checks whether a (simple) memory dereference type is used, size of deref is in ops.size.
Displacement is ignored in this check. """
self.assertEqual(self.inst.operands[n].type, distorm3.OPERAND_MEMORY)
self.assertEqual(self.inst.operands[n].size, derefSize)
self.assertEqual(self.inst.operands[n].index, idx)
def check_deref(self, n, idx, base, derefSize):
""" Checks whether a memory dereference type is used, size of deref is in ops.size.
Base registers is in inst.base.
Displacement is ignored in this check. """
self.assertEqual(self.inst.operands[n].type, distorm3.OPERAND_MEMORY)
self.assertEqual(self.inst.operands[n].size, derefSize)
self.assertEqual(self.inst.operands[n].index, idx)
self.assertEqual(self.inst.operands[n].base, base)
def check_type_size(self, n, t, sz):
self.assertEqual(self.inst.operands[n].type, t)
self.assertEqual(self.inst.operands[n].size, sz)
def check_addr_size(self, sz):
self.assertEqual({0: 16, 1: 32, 2: 64}[(self.inst.rawFlags >> 10) & 3], sz)
def I16(instText, instNo = 0, features = 0):
return Inst(instText, Decode16Bits, instNo, features)
def I32(instText, features = 0):
return Inst(instText, Decode32Bits, 0, features)
def IB32(bin):
return InstBin(bin, Decode32Bits)
def I64(instText, features = 0):
return Inst(instText, Decode64Bits, 0, features)
def IB64(bin):
return InstBin(bin, Decode64Bits)
def ABS64(x):
return x
#return struct.unpack("q", struct.pack("Q", x))[0]
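# Example: the helpers above assemble a snippet with yasm and decode it with
# diStorm, so a typical assertion reads:
#
#   I16("inc si").check_reg(0, Regs.SI, 16)  # operand 0 is the 16-bit SI
#   IB32("90").check_mnemonic("NOP")         # decode raw hex bytes directly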
class TestMode16(unittest.TestCase):
Derefs = ["BX + SI", "BX + DI", "BP + SI", "BP + DI", "SI", "DI", "BP", "BX"]
DerefsInfo = [(Regs.BX, Regs.SI), (Regs.BX, Regs.DI), (Regs.BP, Regs.SI), (Regs.BP, Regs.DI),
(Regs.SI,), (Regs.DI,), (Regs.BP,), (Regs.BX,)]
def test_none(self):
self.failIf(len(I16("cbw").inst.operands) > 0)
def test_imm8(self):
I16("int 0x55").check_imm(0, 0x55, 8)
def test_imm16(self):
I16("ret 0x1122").check_imm(0, 0x1122, 16)
def test_imm_full(self):
I16("push 0x1234").check_imm(0, 0x1234, 16)
def test_imm_aadm(self):
I16("aam").check_imm(0, 0xa, 8)
I16("aam 0x15").check_imm(0, 0x15, 8)
I16("aad").check_imm(0, 0xa, 8)
I16("aad 0x51").check_imm(0, 0x51, 8)
def test_seimm(self):
I16("push 5").check_imm(0, 0x5, 8)
a = I16("push -6")
self.assertEqual(a.inst.size, 2)
a.check_type_size(0, distorm3.OPERAND_IMMEDIATE, 8)
self.failIf(ABS64(a.inst.operands[0].value) != -6)
a = I16("db 0x66\n push -5")
self.assertEqual(a.inst.size, 3)
a.check_type_size(0, distorm3.OPERAND_IMMEDIATE, 32)
self.failIf(ABS64(a.inst.operands[0].value) != -5)
def test_imm16_1_imm8_2(self):
a = I16("enter 0x1234, 0x40")
a.check_imm(0, 0x1234, 16)
a.check_imm(1, 0x40, 8)
def test_imm8_1_imm8_2(self):
a = I16("extrq xmm0, 0x55, 0xff")
a.check_imm(1, 0x55, 8)
a.check_imm(2, 0xff, 8)
def test_reg8(self):
I16("inc dh").check_reg(0, Regs.DH, 8)
def test_reg16(self):
I16("arpl ax, bp").check_reg(1, Regs.BP, 16)
def test_reg_full(self):
I16("dec di").check_reg(0, Regs.DI, 16)
def test_reg32(self):
I16("movmskps ebx, xmm6").check_reg(0, Regs.EBX, 32)
def test_reg32_64(self):
I16("cvttsd2si esp, xmm3").check_reg(0, Regs.ESP, 32)
def test_freg32_64_rm(self):
I16("mov cr0, eax").check_reg(1, Regs.EAX, 32)
def test_rm8(self):
I16("seto dh").check_reg(0, Regs.DH, 8)
def test_rm16(self):
I16("str di").check_reg(0, Regs.DI, 16)
def test_rm_full(self):
I16("push bp").check_reg(0, Regs.BP, 16)
def test_rm32_64(self):
I16("movd xmm0, ebx").check_reg(1, Regs.EBX, 32)
def test_fpum16(self):
I16("fiadd word [bx]").check_simple_deref(0, Regs.BX, 16)
def test_fpum32(self):
I16("fisttp dword [si]").check_simple_deref(0, Regs.SI, 32)
def test_fpum64(self):
I16("fadd qword [esp]").check_simple_deref(0, Regs.ESP, 64)
def test_fpum80(self):
I16("fbld [eax]").check_simple_deref(0, Regs.EAX, 80)
def test_r32_m8(self):
I16("pinsrb xmm4, eax, 0x55").check_reg(1, Regs.EAX, 32)
I16("pinsrb xmm4, [bx], 0x55").check_simple_deref(1, Regs.BX, 8)
def test_r32_m16(self):
I16("pinsrw xmm4, edi, 0x55").check_reg(1, Regs.EDI, 32)
I16("pinsrw xmm1, word [si], 0x55").check_simple_deref(1, Regs.SI, 16)
def test_r32_64_m8(self):
I16("pextrb eax, xmm4, 0xaa").check_reg(0, Regs.EAX, 32)
I16("pextrb [bx], xmm2, 0xaa").check_simple_deref(0, Regs.BX, 8)
def test_r32_64_m16(self):
I16("pextrw esp, xmm7, 0x11").check_reg(0, Regs.ESP, 32)
I16("pextrw [bp], xmm0, 0xbb").check_simple_deref(0, Regs.BP, 16)
def test_rfull_m16(self):
I16("smsw ax").check_reg(0, Regs.AX, 16)
I16("smsw [bx]").check_simple_deref(0, Regs.BX, 16)
def test_creg(self):
I16("mov esp, cr3").check_reg(1, Regs.CR3, 32)
#I16("mov esp, cr8").check_reg(1, Regs.CR8, 32)
def test_dreg(self):
I16("mov edi, dr7").check_reg(1, Regs.DR7, 32)
def test_sreg(self):
I16("mov ax, ds").check_reg(1, Regs.DS, 16)
def test_seg(self):
I16("push fs").check_reg(0, Regs.FS, 16)
I16("db 0x66\n push es").check_reg(0, Regs.ES, 16)
def test_acc8(self):
I16("in al, 0x60").check_reg(0, Regs.AL, 8)
def test_acc_full(self):
I16("add ax, 0x100").check_reg(0, Regs.AX, 16)
def test_acc_full_not64(self):
I16("out 0x64, ax").check_reg(1, Regs.AX, 16)
def test_mem16_full(self):
I16("call far [bp]").check_simple_deref(0, Regs.BP, 16)
def test_ptr16_full(self):
a = I16("jmp 0xffff:0x1234").inst
self.assertEqual(a.size, 5)
self.assertEqual(a.operands[0].type, distorm3.OPERAND_FAR_MEMORY)
self.assertEqual(a.operands[0].size, 16)
self.assertEqual(a.operands[0].seg, 0xffff)
self.assertEqual(a.operands[0].off, 0x1234)
def test_mem16_3264(self):
I16("sgdt [bx]").check_simple_deref(0, Regs.BX, 32)
def test_relcb(self):
a = I16("db 0xe9\ndw 0x00")
a.check_pc(3, 16)
a = I16("db 0xe2\ndb 0x50")
a.check_pc(0x52, 8)
a = I16("db 0xe2\ndb 0xfd")
a.check_pc(-1, 8)
a = I16("db 0x67\ndb 0xe2\ndb 0xf0")
a.check_pc(-0xd, 8)
def test_relc_full(self):
a = I16("jmp 0x100")
self.assertEqual(a.inst.size, 3)
a.check_type_size(0, distorm3.OPERAND_IMMEDIATE, 16)
def test_mem(self):
I16("lea ax, [bx]").check_simple_deref(1, Regs.BX, 0)
def test_mem32(self):
I16("movntss [ebx], xmm5").check_simple_deref(0, Regs.EBX, 32)
def test_mem32_64(self):
I16("movnti [ebx], eax").check_simple_deref(0, Regs.EBX, 32)
def test_mem64(self):
I16("movlps [edi], xmm7").check_simple_deref(0, Regs.EDI, 64)
def test_mem128(self):
I16("movntps [eax], xmm3").check_simple_deref(0, Regs.EAX, 128)
def test_mem64_128(self):
I16("cmpxchg8b [edx]").check_simple_deref(0, Regs.EDX, 64)
def test_moffs8(self):
I16("mov al, [0x1234]").check_abs_disp(1, 0x1234, 16, 8)
I16("mov [dword 0x11112222], al").check_abs_disp(0, 0x11112222, 32, 8)
def test_moff_full(self):
I16("mov [0x8765], ax").check_abs_disp(0, 0x8765, 16, 16)
I16("mov ax, [dword 0x11112222]").check_abs_disp(1, 0x11112222, 32, 16)
def test_const1(self):
I16("shl si, 1").check_imm(1, 1, 8)
def test_regcl(self):
I16("rcl bp, cl").check_reg(1, Regs.CL, 8)
def test_ib_rb(self):
I16("mov dl, 0x88").check_reg(0, Regs.DL, 8)
def test_ib_r_dw_qw(self):
I16("bswap ecx").check_reg(0, Regs.ECX, 32)
def test_ib_r_full(self):
I16("inc si").check_reg(0, Regs.SI, 16)
def test_regi_esi(self):
I16("lodsb").check_simple_deref(1, Regs.SI, 8)
I16("cmpsw").check_simple_deref(0, Regs.SI, 16)
I16("lodsd").check_simple_deref(1, Regs.SI, 32)
def test_regi_edi(self):
I16("movsb").check_simple_deref(0, Regs.DI, 8)
I16("scasw").check_simple_deref(0, Regs.DI, 16)
I16("stosd").check_simple_deref(0, Regs.DI, 32)
def test_regi_ebxal(self):
a = I16("xlatb")
a.check_type_size(0, distorm3.OPERAND_MEMORY, 8)
self.failIf(a.inst.operands[0].index != Regs.AL)
self.failIf(a.inst.operands[0].base != Regs.BX)
def test_regi_eax(self):
I16("vmrun [ax]").check_simple_deref(0, Regs.AX, 16)
def test_regdx(self):
I16("in ax, dx").check_reg(1, Regs.DX, 16)
def test_regecx(self):
I16("invlpga [eax], ecx").check_reg(1, Regs.ECX, 32)
def test_fpu_si(self):
I16("fxch st4").check_reg(0, Regs.ST4, 32)
def test_fpu_ssi(self):
a = I16("fcmovnbe st0, st3")
a.check_reg(0, Regs.ST0, 32)
a.check_reg(1, Regs.ST3, 32)
def test_fpu_sis(self):
a = I16("fadd st3, st0")
a.check_reg(0, Regs.ST3, 32)
a.check_reg(1, Regs.ST0, 32)
def test_mm(self):
I16("pand mm0, mm7").check_reg(0, Regs.MM0, 64)
def test_mm_rm(self):
I16("psllw mm0, 0x55").check_reg(0, Regs.MM0, 64)
def test_mm32(self):
I16("punpcklbw mm1, [si]").check_simple_deref(1, Regs.SI, 32)
def test_mm64(self):
I16("packsswb mm3, [bx]").check_simple_deref(1, Regs.BX, 64)
def test_xmm(self):
I16("orps xmm5, xmm4").check_reg(0, Regs.XMM5, 128)
def test_xmm_rm(self):
I16("psrlw xmm6, 0x12").check_reg(0, Regs.XMM6, 128)
def test_xmm16(self):
I16("pmovsxbq xmm3, [bp]").check_simple_deref(1, Regs.BP, 16)
def test_xmm32(self):
I16("pmovsxwq xmm5, [di]").check_simple_deref(1, Regs.DI, 32)
def test_xmm64(self):
I16("roundsd xmm6, [si], 0x55").check_simple_deref(1, Regs.SI, 64)
def test_xmm128(self):
I16("roundpd xmm7, [bx], 0xaa").check_simple_deref(1, Regs.BX, 128)
def test_regxmm0(self):
I16("blendvpd xmm1, xmm3, xmm0").check_reg(2, Regs.XMM0, 128)
def test_disp_only(self):
a = I16("add [0x1234], bx")
a.check_type_size(0, distorm3.OPERAND_ABSOLUTE_ADDRESS, 16)
self.failIf(a.inst.operands[0].dispSize != 16)
self.failIf(a.inst.operands[0].disp != 0x1234)
def test_modrm(self):
texts = ["ADD [%s], AX" % i for i in self.Derefs]
for i in enumerate(texts):
a = I16(i[1])
if len(self.DerefsInfo[i[0]]) == 2:
a.check_deref(0, self.DerefsInfo[i[0]][1], self.DerefsInfo[i[0]][0], 16)
else:
a.check_simple_deref(0, self.DerefsInfo[i[0]][0], 16)
def test_modrm_disp8(self):
texts = ["ADD [%s + 0x55], AX" % i for i in self.Derefs]
for i in enumerate(texts):
a = I16(i[1])
if len(self.DerefsInfo[i[0]]) == 2:
a.check_deref(0, self.DerefsInfo[i[0]][1], self.DerefsInfo[i[0]][0], 16)
else:
a.check_simple_deref(0, self.DerefsInfo[i[0]][0], 16)
self.failIf(a.inst.operands[0].dispSize != 8)
self.failIf(a.inst.operands[0].disp != 0x55)
def test_modrm_disp16(self):
texts = ["ADD [%s + 0x3322], AX" % i for i in self.Derefs]
for i in enumerate(texts):
a = I16(i[1])
if len(self.DerefsInfo[i[0]]) == 2:
a.check_deref(0, self.DerefsInfo[i[0]][1], self.DerefsInfo[i[0]][0], 16)
else:
a.check_simple_deref(0, self.DerefsInfo[i[0]][0], 16)
self.failIf(a.inst.operands[0].dispSize != 16)
self.failIf(a.inst.operands[0].disp != 0x3322)
class TestMode32(unittest.TestCase):
Derefs = ["EAX", "ECX", "EDX", "EBX", "EBP", "ESI", "EDI"]
DerefsInfo = [Regs.EAX, Regs.ECX, Regs.EDX, Regs.EBX, Regs.EBP, Regs.ESI, Regs.EDI]
def test_none(self):
self.failIf(len(I32("cdq").inst.operands) > 0)
def test_imm8(self):
I32("int 0x55").check_imm(0, 0x55, 8)
def test_imm16(self):
I32("ret 0x1122").check_imm(0, 0x1122, 16)
def test_imm_full(self):
I32("push 0x12345678").check_imm(0, 0x12345678, 32)
def test_imm_aadm(self):
I32("aam").check_imm(0, 0xa, 8)
I32("aam 0x15").check_imm(0, 0x15, 8)
I32("aad").check_imm(0, 0xa, 8)
I32("aad 0x51").check_imm(0, 0x51, 8)
def test_seimm(self):
I32("push 6").check_imm(0, 0x6, 8)
a = I32("push -7")
self.assertEqual(a.inst.size, 2)
a.check_type_size(0, distorm3.OPERAND_IMMEDIATE, 8)
self.failIf(ABS64(a.inst.operands[0].value) != -7)
a = I32("db 0x66\n push -5")
self.assertEqual(a.inst.size, 3)
a.check_type_size(0, distorm3.OPERAND_IMMEDIATE, 16)
self.failIf(ABS64(a.inst.operands[0].value) != -5)
def test_imm16_1_imm8_2(self):
a = I32("enter 0x1234, 0x40")
a.check_imm(0, 0x1234, 16)
a.check_imm(1, 0x40, 8)
def test_imm8_1_imm8_2(self):
a = I32("extrq xmm0, 0x55, 0xff")
a.check_imm(1, 0x55, 8)
a.check_imm(2, 0xff, 8)
def test_reg8(self):
I32("inc dh").check_reg(0, Regs.DH, 8)
def test_reg16(self):
I32("arpl ax, bp").check_reg(1, Regs.BP, 16)
def test_reg_full(self):
I32("dec edi").check_reg(0, Regs.EDI, 32)
def test_reg32(self):
I32("movmskps ebx, xmm6").check_reg(0, Regs.EBX, 32)
def test_reg32_64(self):
I32("cvttsd2si esp, xmm3").check_reg(0, Regs.ESP, 32)
def test_freg32_64_rm(self):
I32("mov cr0, eax").check_reg(1, Regs.EAX, 32)
def test_rm8(self):
I32("seto dh").check_reg(0, Regs.DH, 8)
def test_rm16(self):
I32("verr di").check_reg(0, Regs.DI, 16)
def test_rm_full(self):
I32("push ebp").check_reg(0, Regs.EBP, 32)
def test_rm32_64(self):
I32("movd xmm0, ebx").check_reg(1, Regs.EBX, 32)
def test_fpum16(self):
I32("fiadd word [ebx]").check_simple_deref(0, Regs.EBX, 16)
def test_fpum32(self):
I32("fisttp dword [esi]").check_simple_deref(0, Regs.ESI, 32)
def test_fpum64(self):
I32("fadd qword [esp]").check_simple_deref(0, Regs.ESP, 64)
def test_fpum80(self):
I32("fbld [eax]").check_simple_deref(0, Regs.EAX, 80)
def test_r32_m8(self):
I32("pinsrb xmm4, eax, 0x55").check_reg(1, Regs.EAX, 32)
I32("pinsrb xmm4, [ebx], 0x55").check_simple_deref(1, Regs.EBX, 8)
def test_r32_m16(self):
I32("pinsrw xmm4, edi, 0x55").check_reg(1, Regs.EDI, 32)
I32("pinsrw xmm1, word [esi], 0x55").check_simple_deref(1, Regs.ESI, 16)
def test_r32_64_m8(self):
I32("pextrb eax, xmm4, 0xaa").check_reg(0, Regs.EAX, 32)
I32("pextrb [ebx], xmm2, 0xaa").check_simple_deref(0, Regs.EBX, 8)
def test_r32_64_m16(self):
I32("pextrw esp, xmm7, 0x11").check_reg(0, Regs.ESP, 32)
I32("pextrw [ebp], xmm0, 0xbb").check_simple_deref(0, Regs.EBP, 16)
def test_rfull_m16(self):
I32("smsw eax").check_reg(0, Regs.EAX, 32)
I32("smsw [ebx]").check_simple_deref(0, Regs.EBX, 16)
def test_creg(self):
I32("mov esp, cr3").check_reg(1, Regs.CR3, 32)
def test_dreg(self):
I32("mov edi, dr7").check_reg(1, Regs.DR7, 32)
def test_sreg(self):
I32("mov ax, ds").check_reg(1, Regs.DS, 16)
def test_seg(self):
I32("push ss").check_reg(0, Regs.SS, 16)
I32("db 0x66\n push ds").check_reg(0, Regs.DS, 16)
def test_acc8(self):
I32("in al, 0x60").check_reg(0, Regs.AL, 8)
def test_acc_full(self):
I32("add eax, 0x100").check_reg(0, Regs.EAX, 32)
def test_acc_full_not64(self):
I32("out 0x64, eax").check_reg(1, Regs.EAX, 32)
def test_mem16_full(self):
I32("call far [ebp]").check_simple_deref(0, Regs.EBP, 32)
def test_ptr16_full(self):
a = I32("jmp 0xffff:0x12345678").inst
self.assertEqual(a.size, 7)
self.assertEqual(a.operands[0].type, distorm3.OPERAND_FAR_MEMORY)
self.assertEqual(a.operands[0].size, 32)
self.assertEqual(a.operands[0].seg, 0xffff)
self.assertEqual(a.operands[0].off, 0x12345678)
def test_mem16_3264(self):
I32("sgdt [ebx]").check_simple_deref(0, Regs.EBX, 32)
def test_relcb(self):
a = I32("db 0xe9\ndd 0x00")
a.check_pc(5, 32)
a = I32("db 0xe2\ndb 0x50")
a.check_pc(0x52, 8)
a = I32("db 0xe2\ndb 0xfd")
a.check_pc(-1, 8)
a = I32("db 0x67\ndb 0xe2\ndb 0xf0")
a.check_pc(-0xd, 8)
def test_relc_full(self):
a = I32("jmp 0x100")
self.assertEqual(a.inst.size, 5)
a.check_type_size(0, distorm3.OPERAND_IMMEDIATE, 32)
def test_mem(self):
I32("lea ax, [ebx]").check_simple_deref(1, Regs.EBX, 0)
def test_mem32(self):
I32("movntss [ebx], xmm5").check_simple_deref(0, Regs.EBX, 32)
def test_mem32_64(self):
I32("movnti [edi], eax").check_simple_deref(0, Regs.EDI, 32)
def test_mem64(self):
I32("movlps [edi], xmm7").check_simple_deref(0, Regs.EDI, 64)
def test_mem128(self):
I32("movntps [eax], xmm3").check_simple_deref(0, Regs.EAX, 128)
def test_mem64_128(self):
I32("cmpxchg8b [edx]").check_simple_deref(0, Regs.EDX, 64)
def test_moffs8(self):
I32("mov al, [word 0x5678]").check_abs_disp(1, 0x5678, 16, 8)
I32("mov [0x11112222], al").check_abs_disp(0, 0x11112222, 32, 8)
def test_moff_full(self):
I32("mov [word 0x4321], eax").check_abs_disp(0, 0x4321, 16, 32)
I32("mov eax, [0x11112222]").check_abs_disp(1, 0x11112222, 32, 32)
def test_const1(self):
I32("shl esi, 1").check_imm(1, 1, 8)
def test_regcl(self):
I32("rcl ebp, cl").check_reg(1, Regs.CL, 8)
def test_ib_rb(self):
I32("mov dl, 0x88").check_reg(0, Regs.DL, 8)
def test_ib_r_dw_qw(self):
I32("bswap ecx").check_reg(0, Regs.ECX, 32)
def test_ib_r_full(self):
I32("inc esi").check_reg(0, Regs.ESI, 32)
def test_regi_esi(self):
I32("lodsb").check_simple_deref(1, Regs.ESI, 8)
I32("cmpsw").check_simple_deref(0, Regs.ESI, 16)
I32("lodsd").check_simple_deref(1, Regs.ESI, 32)
def test_regi_edi(self):
I32("movsb").check_simple_deref(0, Regs.EDI, 8)
I32("scasw").check_simple_deref(0, Regs.EDI, 16)
I32("stosd").check_simple_deref(0, Regs.EDI, 32)
def test_regi_ebxal(self):
a = I32("xlatb")
a.check_type_size(0, distorm3.OPERAND_MEMORY, 8)
self.failIf(a.inst.operands[0].index != Regs.AL)
self.failIf(a.inst.operands[0].base != Regs.EBX)
def test_regi_eax(self):
I32("vmrun [eax]").check_simple_deref(0, Regs.EAX, 32)
def test_regdx(self):
I32("in eax, dx").check_reg(1, Regs.DX, 16)
def test_regecx(self):
I32("invlpga [eax], ecx").check_reg(1, Regs.ECX, 32)
def test_fpu_si(self):
I32("fxch st4").check_reg(0, Regs.ST4, 32)
def test_fpu_ssi(self):
a = I32("fcmovnbe st0, st3")
a.check_reg(0, Regs.ST0, 32)
a.check_reg(1, Regs.ST3, 32)
def test_fpu_sis(self):
a = I32("fadd st3, st0")
a.check_reg(0, Regs.ST3, 32)
a.check_reg(1, Regs.ST0, 32)
def test_mm(self):
I32("pand mm0, mm7").check_reg(0, Regs.MM0, 64)
def test_mm_rm(self):
I32("psllw mm0, 0x55").check_reg(0, Regs.MM0, 64)
def test_mm32(self):
I32("punpcklbw mm1, [esi]").check_simple_deref(1, Regs.ESI, 32)
def test_mm64(self):
I32("packsswb mm3, [ebx]").check_simple_deref(1, Regs.EBX, 64)
def test_xmm(self):
I32("orps xmm5, xmm4").check_reg(0, Regs.XMM5, 128)
def test_xmm_rm(self):
I32("psrlw xmm6, 0x12").check_reg(0, Regs.XMM6, 128)
def test_xmm16(self):
I32("pmovsxbq xmm3, [ebp]").check_simple_deref(1, Regs.EBP, 16)
def test_xmm32(self):
I32("pmovsxwq xmm5, [edi]").check_simple_deref(1, Regs.EDI, 32)
def test_xmm64(self):
I32("roundsd xmm6, [esi], 0x55").check_simple_deref(1, Regs.ESI, 64)
def test_xmm128(self):
I32("roundpd xmm7, [ebx], 0xaa").check_simple_deref(1, Regs.EBX, 128)
def test_regxmm0(self):
I32("blendvpd xmm1, xmm3, xmm0").check_reg(2, Regs.XMM0, 128)
def test_cr8(self):
I32("db 0xf0\n mov cr0, eax").check_reg(0, Regs.CR8, 32)
def test_disp_only(self):
a = I32("add [0x12345678], ebx")
a.check_type_size(0, distorm3.OPERAND_ABSOLUTE_ADDRESS, 32)
self.failIf(a.inst.operands[0].dispSize != 32)
self.failIf(a.inst.operands[0].disp != 0x12345678)
def test_modrm(self):
texts = ["ADD [%s], EDI" % i for i in self.Derefs]
for i in enumerate(texts):
a = I32(i[1])
a.check_simple_deref(0, self.DerefsInfo[i[0]], 32)
def test_modrm_disp8(self):
texts = ["ADD [%s + 0x55], ESI" % i for i in self.Derefs]
for i in enumerate(texts):
a = I32(i[1])
a.check_simple_deref(0, self.DerefsInfo[i[0]], 32)
self.failIf(a.inst.operands[0].dispSize != 8)
self.failIf(a.inst.operands[0].disp != 0x55)
def test_modrm_disp32(self):
texts = ["ADD [%s + 0x33221144], EDX" % i for i in self.Derefs]
for i in enumerate(texts):
a = I32(i[1])
a.check_simple_deref(0, self.DerefsInfo[i[0]], 32)
self.failIf(a.inst.operands[0].dispSize != 32)
self.failIf(a.inst.operands[0].disp != 0x33221144)
def test_base_ebp(self):
a = I32("mov [ebp+0x55], eax")
a.check_simple_deref(0, Regs.EBP, 32)
self.failIf(a.inst.operands[0].dispSize != 8)
self.failIf(a.inst.operands[0].disp != 0x55)
a = I32("mov [ebp+0x55+eax], eax")
a.check_deref(0, Regs.EAX, Regs.EBP, 32)
self.failIf(a.inst.operands[0].dispSize != 8)
self.failIf(a.inst.operands[0].disp != 0x55)
a = I32("mov [ebp+0x55443322], eax")
a.check_simple_deref(0, Regs.EBP, 32)
self.failIf(a.inst.operands[0].dispSize != 32)
self.failIf(a.inst.operands[0].disp != 0x55443322)
Bases = ["EAX", "ECX", "EDX", "EBX", "ESP", "ESI", "EDI"]
BasesInfo = [Regs.EAX, Regs.ECX, Regs.EDX, Regs.EBX, Regs.ESP, Regs.ESI, Regs.EDI]
Indices = ["EAX", "ECX", "EDX", "EBX", "EBP", "ESI", "EDI"]
IndicesInfo = [Regs.EAX, Regs.ECX, Regs.EDX, Regs.EBX, Regs.EBP, Regs.ESI, Regs.EDI]
def test_bases(self):
for i in enumerate(self.Bases):
a = I32("cmp ebp, [%s]" % (i[1]))
a.check_simple_deref(1, self.BasesInfo[i[0]], 32)
def test_bases_disp32(self):
for i in enumerate(self.Bases):
a = I32("cmp ebp, [%s+0x12345678]" % (i[1]))
a.check_simple_deref(1, self.BasesInfo[i[0]], 32)
self.failIf(a.inst.operands[1].dispSize != 32)
self.failIf(a.inst.operands[1].disp != 0x12345678)
def test_scales(self):
for i in enumerate(self.Indices):
            # A scale of 2 is omitted by the assembler and encoded as reg+reg instead of reg*2, so only scales 4 and 8 are tested here.
for s in [4, 8]:
a = I32("and bp, [%s*%d]" % (i[1], s))
a.check_deref(1, self.IndicesInfo[i[0]], None, 16)
self.failIf(a.inst.operands[1].scale != s)
def test_sib(self):
for i in enumerate(self.Indices):
for j in enumerate(self.Bases):
for s in [1, 2, 4, 8]:
a = I32("or bp, [%s*%d + %s]" % (i[1], s, j[1]))
a.check_deref(1, self.IndicesInfo[i[0]], self.BasesInfo[j[0]], 16)
if s != 1:
self.failIf(a.inst.operands[1].scale != s)
def test_sib_disp8(self):
for i in enumerate(self.Indices):
for j in enumerate(self.Bases):
for s in [1, 2, 4, 8]:
a = I32("xor al, [%s*%d + %s + 0x55]" % (i[1], s, j[1]))
a.check_deref(1, self.IndicesInfo[i[0]], self.BasesInfo[j[0]], 8)
self.failIf(a.inst.operands[1].dispSize != 8)
self.failIf(a.inst.operands[1].disp != 0x55)
if s != 1:
self.failIf(a.inst.operands[1].scale != s)
def test_sib_disp32(self):
for i in enumerate(self.Indices):
for j in enumerate(self.Bases):
for s in [1, 2, 4, 8]:
a = I32("sub ebp, [%s*%d + %s + 0x55aabbcc]" % (i[1], s, j[1]))
a.check_deref(1, self.IndicesInfo[i[0]], self.BasesInfo[j[0]], 32)
self.failIf(a.inst.operands[1].dispSize != 32)
self.failIf(a.inst.operands[1].disp != 0x55aabbcc)
if s != 1:
self.failIf(a.inst.operands[1].scale != s)
class TestMode64(unittest.TestCase):
Derefs = ["RAX", "RCX", "RDX", "RBX", "RBP", "RSI", "RDI"]
DerefsInfo = [Regs.RAX, Regs.RCX, Regs.RDX, Regs.RBX, Regs.RBP, Regs.RSI, Regs.RDI]
def test_none(self):
self.failIf(len(I64("cdq").inst.operands) > 0)
def test_imm8(self):
I64("int 0x55").check_imm(0, 0x55, 8)
def test_imm16(self):
I64("ret 0x1122").check_imm(0, 0x1122, 16)
def test_imm_full(self):
I64("push 0x12345678").check_imm(0, 0x12345678, 64)
def test_imm_aadm(self):
#I64("aam").check_imm(0, 0xa, 8)
#I64("aam 0x15").check_imm(0, 0x15, 8)
#I64("aad").check_imm(0, 0xa, 8)
#I64("aad 0x51").check_imm(0, 0x51, 8)
pass
def test_seimm(self):
I64("push 6").check_imm(0, 0x6, 8)
a = I64("push -7")
self.assertEqual(a.inst.size, 2)
a.check_type_size(0, distorm3.OPERAND_IMMEDIATE, 8)
self.failIf(ABS64(a.inst.operands[0].value) != -7)
def test_imm16_1_imm8_2(self):
a = I64("enter 0x1234, 0x40")
a.check_imm(0, 0x1234, 16)
a.check_imm(1, 0x40, 8)
def test_imm8_1_imm8_2(self):
a = I64("extrq xmm0, 0x55, 0xff")
a.check_imm(1, 0x55, 8)
a.check_imm(2, 0xff, 8)
def test_reg8(self):
I64("inc dh").check_reg(0, Regs.DH, 8)
def test_reg_full(self):
I64("dec rdi").check_reg(0, Regs.RDI, 64)
I64("cmp r15, r14").check_reg(0, Regs.R15, 64)
I64("cmp r8d, r9d").check_reg(0, Regs.R8D, 32)
I64("cmp r9w, r8w").check_reg(0, Regs.R9W, 16)
def test_reg32(self):
I64("movmskps ebx, xmm6").check_reg(0, Regs.EBX, 32)
I64("movmskps r11d, xmm6").check_reg(0, Regs.R11D, 32)
def test_reg32_64(self):
I64("cvttsd2si rsp, xmm3").check_reg(0, Regs.RSP, 64)
I64("cvttsd2si r14, xmm3").check_reg(0, Regs.R14, 64)
def test_freg32_64_rm(self):
I64("mov cr0, rax").check_reg(1, Regs.RAX, 64)
I64("mov cr0, r14").check_reg(1, Regs.R14, 64)
def test_rm8(self):
I64("seto dh").check_reg(0, Regs.DH, 8)
def test_rm16(self):
I64("verr di").check_reg(0, Regs.DI, 16)
I64("verr r8w").check_reg(0, Regs.R8W, 16)
def test_rm_full(self):
I64("push rbp").check_reg(0, Regs.RBP, 64)
def test_rm32_64(self):
I64("movq xmm0, rdx").check_reg(1, Regs.RDX, 64)
I64("movq xmm0, r10").check_reg(1, Regs.R10, 64)
I64("cvtsi2sd xmm0, rdx").check_reg(1, Regs.RDX, 64)
I64("vmread rax, rax").check_reg(1, Regs.RAX, 64)
def test_rm16_32(self):
I64("movsxd rax, eax").check_reg(1, Regs.EAX, 32)
I64("movzx rax, ax").check_reg(1, Regs.AX, 16)
def test_fpum16(self):
I64("fiadd word [rbx]").check_simple_deref(0, Regs.RBX, 16)
def test_fpum32(self):
I64("fisttp dword [rsi]").check_simple_deref(0, Regs.RSI, 32)
def test_fpum64(self):
I64("fadd qword [rsp]").check_simple_deref(0, Regs.RSP, 64)
def test_fpum80(self):
I64("fbld [rax]").check_simple_deref(0, Regs.RAX, 80)
def test_r32_m8(self):
I64("pinsrb xmm4, eax, 0x55").check_reg(1, Regs.EAX, 32)
I64("pinsrb xmm4, [rbx], 0x55").check_simple_deref(1, Regs.RBX, 8)
def test_r32_m16(self):
I64("pinsrw xmm4, edi, 0x55").check_reg(1, Regs.EDI, 32)
I64("pinsrw xmm1, word [rsi], 0x55").check_simple_deref(1, Regs.RSI, 16)
I64("pinsrw xmm1, r8d, 0x55").check_reg(1, Regs.R8D, 32)
def test_r32_64_m8(self):
I64("pextrb eax, xmm4, 0xaa").check_reg(0, Regs.EAX, 32)
I64("pextrb [rbx], xmm2, 0xaa").check_simple_deref(0, Regs.RBX, 8)
def test_r32_64_m16(self):
I64("pextrw esp, xmm7, 0x11").check_reg(0, Regs.ESP, 32)
I64("pextrw [rbp], xmm0, 0xbb").check_simple_deref(0, Regs.RBP, 16)
def test_rfull_m16(self):
I64("smsw eax").check_reg(0, Regs.EAX, 32)
I64("smsw [rbx]").check_simple_deref(0, Regs.RBX, 16)
def test_creg(self):
I64("mov rsp, cr3").check_reg(1, Regs.CR3, 64)
I64("mov cr8, rdx").check_reg(0, Regs.CR8, 64)
def test_dreg(self):
I64("mov rdi, dr7").check_reg(1, Regs.DR7, 64)
def test_sreg(self):
I64("mov ax, fs").check_reg(1, Regs.FS, 16)
def test_seg(self):
I64("push gs").check_reg(0, Regs.GS, 16)
def test_acc8(self):
I64("in al, 0x60").check_reg(0, Regs.AL, 8)
def test_acc_full(self):
I64("add rax, 0x100").check_reg(0, Regs.RAX, 64)
def test_acc_full_not64(self):
I64("out 0x64, eax").check_reg(1, Regs.EAX, 32)
def test_mem16_full(self):
I64("call far [rbp]").check_simple_deref(0, Regs.RBP, 32)
I64("db 0x48\n call far [rbp]").check_simple_deref(0, Regs.RBP, 64)
def test_mem16_3264(self):
I64("sgdt [rbx]").check_simple_deref(0, Regs.RBX, 64)
def test_relcb(self):
a = I64("db 0xe9\ndd 0x00")
a.check_pc(5, 32)
a = I64("db 0xe2\ndb 0x50")
a.check_pc(0x52, 8)
a = I64("db 0xe2\ndb 0xfd")
a.check_pc(-1, 8)
a = I64("db 0x67\ndb 0xe2\ndb 0xf0")
a.check_pc(-0xd, 8)
def test_relc_full(self):
a = I64("jmp 0x100")
self.assertEqual(a.inst.size, 5)
a.check_type_size(0, distorm3.OPERAND_IMMEDIATE, 32)
def test_mem(self):
I64("lea ax, [rbx]").check_simple_deref(1, Regs.RBX, 0)
def test_mem32(self):
I64("movntss [rbx], xmm5").check_simple_deref(0, Regs.RBX, 32)
def test_mem32_64(self):
I64("movnti [rdi], eax").check_simple_deref(0, Regs.RDI, 32)
I64("movnti [rbp], rax").check_simple_deref(0, Regs.RBP, 64)
def test_mem64(self):
I64("movlps [rdi], xmm7").check_simple_deref(0, Regs.RDI, 64)
def test_mem128(self):
I64("movntps [rax], xmm3").check_simple_deref(0, Regs.RAX, 128)
def test_mem64_128(self):
I64("cmpxchg8b [rdx]").check_simple_deref(0, Regs.RDX, 64)
I64("cmpxchg16b [rbx]").check_simple_deref(0, Regs.RBX, 128)
def test_moffs8(self):
I64("mov al, [dword 0x12345678]").check_abs_disp(1, 0x12345678, 32, 8)
I64("mov [qword 0xaaaabbbbccccdddd], al").check_abs_disp(0, 0xaaaabbbbccccdddd, 64, 8)
def test_moff_full(self):
I64("mov [dword 0xaaaabbbb], rax").check_abs_disp(0, 0xffffffffaaaabbbb, 32, 64)
I64("mov rax, [qword 0xaaaabbbbccccdddd]").check_abs_disp(1, 0xaaaabbbbccccdddd, 64, 64)
def test_const1(self):
I64("shl rsi, 1").check_imm(1, 1, 8)
def test_regcl(self):
I64("rcl rbp, cl").check_reg(1, Regs.CL, 8)
def test_ib_rb(self):
I64("mov dl, 0x88").check_reg(0, Regs.DL, 8)
I64("mov spl, 0x88").check_reg(0, Regs.SPL, 8)
I64("mov r10b, 0x88").check_reg(0, Regs.R10B, 8)
def test_ib_r_dw_qw(self):
I64("bswap rcx").check_reg(0, Regs.RCX, 64)
I64("bswap r10").check_reg(0, Regs.R10, 64)
I64("push r10").check_reg(0, Regs.R10, 64)
def test_ib_r_full(self):
I64("inc rsi").check_reg(0, Regs.RSI, 64)
I64("inc r9").check_reg(0, Regs.R9, 64)
I64("push r10w").check_reg(0, Regs.R10W, 16)
I64("xchg r10d, eax").check_reg(0, Regs.R10D, 32)
def test_regi_esi(self):
I64("lodsb").check_simple_deref(1, Regs.RSI, 8)
I64("cmpsw").check_simple_deref(0, Regs.RSI, 16)
I64("lodsd").check_simple_deref(1, Regs.RSI, 32)
I64("lodsq").check_simple_deref(1, Regs.RSI, 64)
def test_regi_edi(self):
I64("movsb").check_simple_deref(0, Regs.RDI, 8)
I64("scasw").check_simple_deref(0, Regs.RDI, 16)
I64("stosd").check_simple_deref(0, Regs.RDI, 32)
I64("stosq").check_simple_deref(0, Regs.RDI, 64)
def test_regi_ebxal(self):
a = I64("xlatb")
a.check_type_size(0, distorm3.OPERAND_MEMORY, 8)
self.failIf(a.inst.operands[0].index != Regs.AL)
self.failIf(a.inst.operands[0].base != Regs.RBX)
def test_regi_eax(self):
I64("vmrun [rax]").check_simple_deref(0, Regs.RAX, 64)
def test_regdx(self):
#I64("in eax, dx").check_reg(1, Regs.DX, 16)
pass
def test_regecx(self):
I64("invlpga [rax], ecx").check_reg(1, Regs.ECX, 32)
def test_fpu_si(self):
I64("fxch st4").check_reg(0, Regs.ST4, 32)
def test_fpu_ssi(self):
a = I64("fcmovnbe st0, st3")
a.check_reg(0, Regs.ST0, 32)
a.check_reg(1, Regs.ST3, 32)
def test_fpu_sis(self):
a = I64("fadd st3, st0")
a.check_reg(0, Regs.ST3, 32)
a.check_reg(1, Regs.ST0, 32)
def test_mm(self):
I64("pand mm0, mm7").check_reg(0, Regs.MM0, 64)
def test_mm_rm(self):
I64("psllw mm0, 0x55").check_reg(0, Regs.MM0, 64)
def test_mm32(self):
I64("punpcklbw mm1, [rsi]").check_simple_deref(1, Regs.RSI, 32)
def test_mm64(self):
I64("packsswb mm3, [rbx]").check_simple_deref(1, Regs.RBX, 64)
def test_xmm(self):
I64("orps xmm5, xmm4").check_reg(0, Regs.XMM5, 128)
I64("orps xmm15, xmm4").check_reg(0, Regs.XMM15, 128)
def test_xmm_rm(self):
I64("psrlw xmm6, 0x12").check_reg(0, Regs.XMM6, 128)
I64("psrlw xmm13, 0x12").check_reg(0, Regs.XMM13, 128)
def test_xmm16(self):
I64("pmovsxbq xmm3, [rbp]").check_simple_deref(1, Regs.RBP, 16)
def test_xmm32(self):
I64("pmovsxwq xmm5, [rdi]").check_simple_deref(1, Regs.RDI, 32)
def test_xmm64(self):
I64("roundsd xmm6, [rsi], 0x55").check_simple_deref(1, Regs.RSI, 64)
def test_xmm128(self):
I64("roundpd xmm7, [rbx], 0xaa").check_simple_deref(1, Regs.RBX, 128)
I64("roundpd xmm7, xmm15, 0xaa").check_reg(1, Regs.XMM15, 128)
def test_regxmm0(self):
I64("blendvpd xmm1, xmm3, xmm0").check_reg(2, Regs.XMM0, 128)
def test_disp_only(self):
a = I64("add [0x12345678], rbx")
a.check_type_size(0, distorm3.OPERAND_ABSOLUTE_ADDRESS, 64)
self.failIf(a.inst.operands[0].dispSize != 32)
self.failIf(a.inst.operands[0].disp != 0x12345678)
def test_modrm(self):
texts = ["ADD [%s], RDI" % i for i in self.Derefs]
for i in enumerate(texts):
a = I64(i[1])
a.check_simple_deref(0, self.DerefsInfo[i[0]], 64)
def test_modrm_disp8(self):
texts = ["ADD [%s + 0x55], RSI" % i for i in self.Derefs]
for i in enumerate(texts):
a = I64(i[1])
a.check_simple_deref(0, self.DerefsInfo[i[0]], 64)
self.failIf(a.inst.operands[0].dispSize != 8)
self.failIf(a.inst.operands[0].disp != 0x55)
def test_modrm_disp32(self):
texts = ["ADD [%s + 0x33221144], RDX" % i for i in self.Derefs]
for i in enumerate(texts):
a = I64(i[1])
a.check_simple_deref(0, self.DerefsInfo[i[0]], 64)
self.failIf(a.inst.operands[0].dispSize != 32)
self.failIf(a.inst.operands[0].disp != 0x33221144)
def test_base_rbp(self):
a = I64("mov [rbp+0x55], eax")
a.check_simple_deref(0, Regs.RBP, 32)
self.failIf(a.inst.operands[0].dispSize != 8)
self.failIf(a.inst.operands[0].disp != 0x55)
a = I64("mov [rbp+0x55443322], eax")
a.check_simple_deref(0, Regs.RBP, 32)
self.failIf(a.inst.operands[0].dispSize != 32)
self.failIf(a.inst.operands[0].disp != 0x55443322)
def test_base_rip(self):
a = I64("mov [rip+0x12345678], rdx")
a.check_simple_deref(0, Regs.RIP, 64)
self.failIf(a.inst.operands[0].dispSize != 32)
self.failIf(a.inst.operands[0].disp != 0x12345678)
def test_reg8_rex(self):
I64("mov sil, al").check_reg(0, Regs.SIL, 8)
I64("inc bpl").check_reg(0, Regs.BPL, 8)
def test_imm64(self):
I64("mov rax, 0x1234567890abcdef").check_imm(1, 0x1234567890abcdef, 64)
def test_reg64(self):
I64("movsxd r10, eax").check_reg(0, Regs.R10, 64)
    def test_rm16_32_raw(self):
#MOVZXD RAX, [RAX]
I64("db 0x63\n db 0x00").check_simple_deref(1, Regs.RAX, 32)
#MOVZXDW RAX, [RAX]
#I64("db 0x66\n db 0x63\n db 0x00").check_simple_deref(1, Regs.RAX, 16)
#MOVZXD RAX, EAX
I64("db 0x63\n db 0xc0").check_reg(1, Regs.EAX, 32)
#MOVZXDW RAX, AX
#I64("db 0x66\n db 0x63\n db 0xc0").check_reg(1, Regs.AX, 16)
#MOVZXDW RAX, R8W
#I64("db 0x66\n db 0x41\n db 0x63\n db 0xc0").check_reg(1, Regs.R8W, 16)
Bases = ["RAX", "RCX", "RDX", "RBX", "RSP", "RSI", "RDI", "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15"]
BasesInfo = [Regs.RAX, Regs.RCX, Regs.RDX, Regs.RBX, Regs.RSP, Regs.RSI, Regs.RDI, Regs.R8, Regs.R9, Regs.R10, Regs.R11, Regs.R12, Regs.R13, Regs.R14, Regs.R15]
Indices = ["RAX", "RCX", "RDX", "RBX", "RBP", "RSI", "RDI", "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15"]
IndicesInfo = [Regs.RAX, Regs.RCX, Regs.RDX, Regs.RBX, Regs.RBP, Regs.RSI, Regs.RDI, Regs.R8, Regs.R9, Regs.R10, Regs.R11, Regs.R12, Regs.R13, Regs.R14, Regs.R15]
def test_bases(self):
for i in enumerate(self.Bases):
a = I64("cmp rbp, [%s]" % (i[1]))
a.check_simple_deref(1, self.BasesInfo[i[0]], 64)
def test_bases_disp32(self):
for i in enumerate(self.Bases):
a = I64("cmp rbp, [%s+0x12345678]" % (i[1]))
a.check_simple_deref(1, self.BasesInfo[i[0]], 64)
self.failIf(a.inst.operands[1].dispSize != 32)
self.failIf(a.inst.operands[1].disp != 0x12345678)
def test_scales(self):
for i in enumerate(self.Indices):
            # A scale of 2 is omitted by the assembler and encoded as reg+reg instead of reg*2, so only scales 4 and 8 are tested here.
for s in [4, 8]:
a = I64("and rbp, [%s*%d]" % (i[1], s))
a.check_deref(1, self.IndicesInfo[i[0]], None, 64)
self.failIf(a.inst.operands[1].scale != s)
def test_sib(self):
for i in enumerate(self.Indices):
for j in enumerate(self.Bases):
for s in [1, 2, 4, 8]:
a = I64("or rbp, [%s*%d + %s]" % (i[1], s, j[1]))
a.check_deref(1, self.IndicesInfo[i[0]], self.BasesInfo[j[0]], 64)
if s != 1:
self.failIf(a.inst.operands[1].scale != s)
def test_sib_disp8(self):
for i in enumerate(self.Indices):
for j in enumerate(self.Bases):
for s in [1, 2, 4, 8]:
a = I64("xor al, [%s*%d + %s + 0x55]" % (i[1], s, j[1]))
a.check_deref(1, self.IndicesInfo[i[0]], self.BasesInfo[j[0]], 8)
self.failIf(a.inst.operands[1].dispSize != 8)
self.failIf(a.inst.operands[1].disp != 0x55)
if s != 1:
self.failIf(a.inst.operands[1].scale != s)
def test_sib_disp32(self):
for i in enumerate(self.Indices):
for j in enumerate(self.Bases):
for s in [1, 2, 4, 8]:
a = I64("sub rdx, [%s*%d + %s + 0x55aabbcc]" % (i[1], s, j[1]))
a.check_deref(1, self.IndicesInfo[i[0]], self.BasesInfo[j[0]], 64)
self.failIf(a.inst.operands[1].dispSize != 32)
self.failIf(a.inst.operands[1].disp != 0x55aabbcc)
if s != 1:
self.failIf(a.inst.operands[1].scale != s)
def test_base32(self):
I64("mov eax, [ebx]").check_simple_deref(1, Regs.EBX, 32)
class TestInstTable(unittest.TestCase):
""" Check that locate_inst algorithm covers all opcode-length (ol)
for the varying sizes of opcodes.
The bad tests should not find an instruction, so they should fail on purpose,
to see we don't crash the diassembler.
Also test for some end-cases with nop and wait. """
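    # Naming note (inferred from the cases below, not from diStorm docs): "ol1",
    # "ol2" and "ol3" cover 1-, 2- and 3-byte opcodes, a trailing "3" marks
    # opcodes that are further selected by the ModR/M reg field, and a trailing
    # "d" marks divided/grouped ranges such as the FPU 0xD9 block.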
def test_ol1(self):
IB32("00c0").check_mnemonic("ADD")
def test_ol13(self):
IB32("80c055").check_mnemonic("ADD")
def test_ol1d(self):
IB32("d900").check_mnemonic("FLD")
IB32("d9c8").check_mnemonic("FXCH")
IB32("d9e1").check_mnemonic("FABS")
def test_ol2(self):
IB32("0f06").check_mnemonic("CLTS")
def test_ol23(self):
IB32("0fbae055").check_mnemonic("BT")
def test_ol2d(self):
IB32("0f01e0").check_mnemonic("SMSW")
IB32("0f0130").check_mnemonic("LMSW")
IB32("0f01c9").check_mnemonic("MWAIT")
def test_ol3(self):
IB32("0f380000").check_mnemonic("PSHUFB")
def test_ol1_bad(self):
# There is no undefined instruction in the root, except a prefix, oh well.
pass
def test_ol13_bad(self):
IB32("f780").check_invalid()
def test_ol1d_bad(self):
IB32("d908").check_invalid()
IB32("d9d1").check_invalid()
IB32("d9ef").check_invalid()
def test_ol2_bad(self):
IB32("0fff").check_invalid()
def test_ol23_bad(self):
IB32("0f0dff").check_invalid()
def test_ol2d_bad(self):
IB32("0f0128").check_invalid()
IB32("0f01ca").check_invalid()
def test_ol3_bad(self):
IB32("0f0fff").check_invalid()
def test_index63(self):
# Test arpl, since it has a special treatment for 32/64 bits.
a = IB32("63c0")
a.check_mnemonic("ARPL")
a = IB64("63c0")
a.check_mnemonic("MOVSXD")
def test_index90(self):
        # If NOP is prefixed with 0xF3 it becomes PAUSE; if it is prefixed with REX it might decode as XCHG.
IB32("90").check_mnemonic("NOP")
IB64("90").check_mnemonic("NOP")
IB64("4890").check_mnemonic("NOP")
IB64("4190").check_mnemonic("XCHG")
IB64("f390").check_mnemonic("PAUSE")
def test_wait(self):
        # The WAIT instruction is very tricky: it might be coalesced with the next bytes
        # if they are 'waitable'; otherwise it is a standalone instruction.
IB32("9b90").check_mnemonic("WAIT", 0) # nop isn't waitable.
IB32("9bdfe0").check_mnemonic("FSTSW") # waitable stsw
IB32("dfe0").check_mnemonic("FNSTSW") # non-waitable stsw
IB32("9b00c0").check_mnemonic("WAIT") # add isn't waitable
IB32("9bd930").check_mnemonic("FSTENV") # waitable fstenv
IB32("9b66dbe3").check_mnemonic("WAIT") # prefix breaks waiting
def test_3dnow(self):
IB32("0f0fc00d").check_mnemonic("PI2FD")
IB32("0f0d00").check_mnemonic("PREFETCH")
def test_mandatory(self):
IB32("f30f10c0").check_mnemonic("MOVSS")
IB32("660f10c0").check_mnemonic("MOVUPD")
IB32("660f71d055").check_mnemonic("PSRLW")
IB32("660ffec0").check_mnemonic("PADDD")
IB32("f20f10c0").check_mnemonic("MOVSD")
IB32("f20f11c0").check_mnemonic("MOVSD")
IB32("660f3800c0").check_mnemonic("PSHUFB")
IB32("f20f38f0c0").check_mnemonic("CRC32")
IB32("660fc730").check_mnemonic("VMCLEAR")
IB32("f30fc730").check_mnemonic("VMXON")
def test_vex(self):
I32("vaddpd ymm1, ymm2, ymm2").check_mnemonic("VADDPD") # pre encoding: 66, 0f, 58
I32("vaddps ymm1, ymm2, ymm2").check_mnemonic("VADDPS") # pre encoding: 0f, 58
I32("vaddsd xmm1, xmm2, qword [eax]").check_mnemonic("VADDSD") # pre encoding: f2, 0f, 58
I32("vaddss xmm1, xmm2, dword [eax]").check_mnemonic("VADDSS") # pre encoding: f3, 0f, 58
I32("vmovsd xmm1, xmm2, xmm3").check_mnemonic("VMOVSD") # pre encoding: f2, 0f, 10
I32("vmovsd xmm1, qword [eax]").check_mnemonic("VMOVSD") # pre encoding: f2 0f 10 - but VEX.vvvv is not encoded!
        # Since a VEX prefix can encode the virtual (implied) prefix, there are three ways to reach 0x0f 0x38.
        # Verify that all of them decode correctly.
IB32("c4e279dbc2").check_mnemonic("VAESIMC") # pre encoding: 66, 0f, 38, db, virtual prefix is 0f 38
IB32("c4e17938dbc2").check_mnemonic("VAESIMC") # the virtual prefix is only 0f
IB32("c5f938dbc2").check_mnemonic("VAESIMC") # the virtual prefix is only 0f, but short VEX
# Same test as earlier, but for 0xf 0x3a, though this instruction doesn't have a short form.
IB32("c4e3710dc255").check_mnemonic("VBLENDPD") # pre encoding: 66, 0f, 3a, 0d, virtual prefix is 0f 3a
IB32("c4e1713a0dc255").check_mnemonic("VBLENDPD") # pre encoding: 66, 0f, 3a, 0d, virtual prefix is 0f
I32("vldmxcsr dword [eax]").check_mnemonic("VLDMXCSR")
I32("vzeroupper").check_mnemonic("VZEROUPPER")
I32("vzeroall").check_mnemonic("VZEROALL")
I32("vpslld xmm1, xmm2, xmm3").check_mnemonic("VPSLLD")
def test_vex_special(self):
        # VEX.vvvv is encoded even though VAESIMC has no such operand encoding, so this must be invalid.
IB32("c4e271dbca").check_invalid()
IB32("c4e2791800").check_mnemonic("VBROADCASTSS") # just to make sure this instruction is fine.
IB32("c4e279ff00").check_invalid() # pre encoding: 66, 0f, 38, ff
IB32("c4e179ff00").check_invalid() # pre encoding: 66, 0f, 38, ff, mmmmm = 1
IB32("c4e379ff00").check_invalid() # pre encoding: 66, 0f, 38, ff, mmmmm = 3
IB32("c4e4791800").check_invalid() # pre encoding: 66, 0f, 38, 18, mmmmm = 4
IB32("c5f8ae10").check_mnemonic("VLDMXCSR") # pre encoding: 0f, ae /02
IB32("c4c178ae10").check_mnemonic("VLDMXCSR") # longer form of 0f, ae /02
IB32("c4c179ae10").check_invalid() # longer form of 0f, ae /02, invalid pp=1
IB32("c4c17aae10").check_invalid() # longer form of 0f, ae /02, invalid pp=2
IB32("c4c17bae10").check_invalid() # longer form of 0f, ae /02, invalid pp=3
IB32("c4c17877").check_mnemonic("VZEROUPPER") # longer form of 0f, 77
IB32("c4c17c77").check_mnemonic("VZEROALL") # longer form of 0f, 77
IB32("c4c97c77").check_invalid() # longer form of 0f, 77, invalid mmmmm
def test_crc32(self):
I32("crc32 eax, al").check_reg(0, Regs.EAX, 32)
def test_lzcnt(self):
        # This is the only instruction that has a mandatory prefix and can ALSO have a valid operand-size prefix!
I32("lzcnt ax, bx").check_reg(0, Regs.AX, 16)
I32("lzcnt eax, ebx").check_reg(0, Regs.EAX, 32)
I64("lzcnt rax, rbx").check_reg(0, Regs.RAX, 64)
class TestAVXOperands(unittest.TestCase):
def test_rm32(self):
#I16("vextractps eax, xmm2, 3").check_reg(0, Regs.EAX, 32)
I32("vextractps eax, xmm2, 3").check_reg(0, Regs.EAX, 32)
I64("vextractps eax, xmm2, 3").check_reg(0, Regs.EAX, 32)
def test_reg32_64_m8(self):
#I16("vpextrb eax, xmm2, 3").check_reg(0, Regs.EAX, 32)
I32("vpextrb eax, xmm2, 3").check_reg(0, Regs.EAX, 32)
I64("vpextrb eax, xmm2, 3").check_reg(0, Regs.EAX, 32)
I64("vpextrb rax, xmm2, 3").check_reg(0, Regs.RAX, 64)
I32("vpextrb [ebx], xmm2, 3").check_simple_deref(0, Regs.EBX, 8)
I64("vpextrb [rbx], xmm2, 3").check_simple_deref(0, Regs.RBX, 8)
def test_reg32_64_m16(self):
I32("vpextrw eax, xmm2, 3").check_reg(0, Regs.EAX, 32)
I64("vpextrw rax, xmm2, 3").check_reg(0, Regs.RAX, 64)
I64("vpextrw rax, xmm2, 3").check_reg(0, Regs.RAX, 64)
I32("vpextrw [ebx], xmm2, 3").check_simple_deref(0, Regs.EBX, 16)
I64("vpextrw [rbx], xmm2, 3").check_simple_deref(0, Regs.RBX, 16)
def test_wreg32_64_WITH_wxmm32_64(self):
a = I32("vcvtss2si eax, xmm1")
a.check_reg(0, Regs.EAX, 32)
a.check_reg(1, Regs.XMM1, 128)
a = I64("vcvtss2si rax, [rbx]")
a.check_reg(0, Regs.RAX, 64)
a.check_simple_deref(1, Regs.RBX, 64)
a = I64("vcvtss2si eax, [rbx]")
a.check_reg(0, Regs.EAX, 32)
a.check_simple_deref(1, Regs.RBX, 32)
def test_vxmm(self):
I32("vaddsd xmm1, xmm2, xmm3").check_reg(1, Regs.XMM2, 128)
I64("vaddsd xmm2, xmm3, xmm4").check_reg(1, Regs.XMM3, 128)
def test_xmm_imm(self):
I32("vpblendvb xmm1, xmm2, xmm3, xmm4").check_reg(3, Regs.XMM4, 128)
        # Force XMM15, but the high bit is ignored in 32 bits.
self.failIf(IB32("c4e3694ccbf0").inst.operands[3].index != Regs.XMM7)
I64("vpblendvb xmm1, xmm2, xmm3, xmm15").check_reg(3, Regs.XMM15, 128)
def test_yxmm(self):
I32("vaddsubpd ymm2, ymm4, ymm6").check_reg(0, Regs.YMM2, 256)
I32("vaddsubpd xmm7, xmm4, xmm6").check_reg(0, Regs.XMM7, 128)
I64("vaddsubpd ymm12, ymm4, ymm6").check_reg(0, Regs.YMM12, 256)
I64("vaddsubpd xmm14, xmm4, xmm6").check_reg(0, Regs.XMM14, 128)
def test_yxmm_imm(self):
I32("vblendvpd xmm1, xmm2, xmm3, xmm4").check_reg(3, Regs.XMM4, 128)
I32("vblendvpd ymm1, ymm2, ymm3, ymm4").check_reg(3, Regs.YMM4, 256)
        # Force YMM15, but the high bit is ignored in 32 bits.
self.failIf(IB32("c4e36d4bcbf0").inst.operands[3].index != Regs.YMM7)
I64("vblendvpd xmm1, xmm2, xmm3, xmm14").check_reg(3, Regs.XMM14, 128)
I64("vblendvpd ymm1, ymm2, ymm3, ymm9").check_reg(3, Regs.YMM9, 256)
def test_ymm(self):
I32("vbroadcastsd ymm5, [eax]").check_reg(0, Regs.YMM5, 256)
I64("vbroadcastsd ymm13, [rax]").check_reg(0, Regs.YMM13, 256)
def test_ymm256(self):
I32("vperm2f128 ymm2, ymm4, [eax], 0x55").check_simple_deref(2, Regs.EAX, 256)
I64("vperm2f128 ymm2, ymm14, [rax], 0x55").check_simple_deref(2, Regs.RAX, 256)
def test_vymm(self):
I32("vinsertf128 ymm1, ymm4, xmm4, 0xaa").check_reg(1, Regs.YMM4, 256)
I64("vinsertf128 ymm1, ymm15, xmm4, 0xaa").check_reg(1, Regs.YMM15, 256)
def test_vyxmm(self):
I32("vmaxpd xmm1, xmm2, xmm3").check_reg(1, Regs.XMM2, 128)
I32("vmaxpd ymm1, ymm2, ymm3").check_reg(1, Regs.YMM2, 256)
I64("vmaxpd xmm1, xmm12, xmm3").check_reg(1, Regs.XMM12, 128)
I64("vmaxpd ymm1, ymm12, ymm3").check_reg(1, Regs.YMM12, 256)
def test_yxmm64_256(self):
I32("vmovddup xmm1, xmm2").check_reg(1, Regs.XMM2, 128)
I32("vmovddup ymm1, ymm2").check_reg(1, Regs.YMM2, 256)
I32("vmovddup xmm1, [ecx]").check_simple_deref(1, Regs.ECX, 64)
I32("vmovddup ymm1, [ebx]").check_simple_deref(1, Regs.EBX, 256)
I64("vmovddup xmm1, xmm12").check_reg(1, Regs.XMM12, 128)
I64("vmovddup ymm1, ymm12").check_reg(1, Regs.YMM12, 256)
I64("vmovddup xmm1, [rcx]").check_simple_deref(1, Regs.RCX, 64)
I64("vmovddup ymm1, [rbx]").check_simple_deref(1, Regs.RBX, 256)
def test_yxmm128_256(self):
I32("vandnpd xmm1, xmm2, xmm3").check_reg(2, Regs.XMM3, 128)
I32("vandnpd ymm1, ymm2, ymm3").check_reg(2, Regs.YMM3, 256)
I32("vandnpd xmm1, xmm2, [edi]").check_simple_deref(2, Regs.EDI, 128)
I32("vandnpd ymm1, ymm2, [esi]").check_simple_deref(2, Regs.ESI, 256)
I64("vandnpd xmm1, xmm2, xmm13").check_reg(2, Regs.XMM13, 128)
I64("vandnpd ymm1, ymm2, ymm13").check_reg(2, Regs.YMM13, 256)
I64("vandnpd xmm1, xmm2, [rdi]").check_simple_deref(2, Regs.RDI, 128)
I64("vandnpd ymm1, ymm2, [rsi]").check_simple_deref(2, Regs.RSI, 256)
def test_lxmm64_128(self):
I32("vcvtdq2pd xmm1, xmm2").check_reg(1, Regs.XMM2, 128)
I32("vcvtdq2pd xmm1, [eax]").check_simple_deref(1, Regs.EAX, 64)
I32("vcvtdq2pd ymm1, [ebx]").check_simple_deref(1, Regs.EBX, 128)
I64("vcvtdq2pd xmm1, xmm12").check_reg(1, Regs.XMM12, 128)
I64("vcvtdq2pd xmm1, [rax]").check_simple_deref(1, Regs.RAX, 64)
I64("vcvtdq2pd ymm1, [rbx]").check_simple_deref(1, Regs.RBX, 128)
def test_lmem128_256(self):
I32("vlddqu xmm1, [eax]").check_simple_deref(1, Regs.EAX, 128)
I32("vlddqu ymm1, [eax]").check_simple_deref(1, Regs.EAX, 256)
I64("vlddqu xmm1, [r14]").check_simple_deref(1, Regs.R14, 128)
I64("vlddqu ymm1, [r13]").check_simple_deref(1, Regs.R13, 256)
class TestMisc(unittest.TestCase):
def test_lods(self):
a = I16("lodsb")
a.check_reg(0, Regs.AL, 8)
a.check_simple_deref(1, Regs.SI, 8)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("lodsw")
a.check_reg(0, Regs.AX, 16)
a.check_simple_deref(1, Regs.ESI, 16)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("lodsd")
a.check_reg(0, Regs.EAX, 32)
a.check_simple_deref(1, Regs.ESI, 32)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I64("lodsq")
a.check_reg(0, Regs.RAX, 64)
a.check_simple_deref(1, Regs.RSI, 64)
self.assertEqual(a.inst.isSegmentDefault, False)
a = I16("db 0x2e\nlodsb")
a.check_reg(0, Regs.AL, 8)
a.check_simple_deref(1, Regs.SI, 8)
self.assertEqual(a.inst.segment, Regs.CS)
self.assertEqual(a.inst.isSegmentDefault, False)
a = I32("db 0x2e\nlodsw")
a.check_reg(0, Regs.AX, 16)
a.check_simple_deref(1, Regs.ESI, 16)
self.assertEqual(a.inst.segment, Regs.CS)
self.assertEqual(a.inst.isSegmentDefault, False)
a = I32("db 0x2e\nlodsd")
a.check_reg(0, Regs.EAX, 32)
a.check_simple_deref(1, Regs.ESI, 32)
self.assertEqual(a.inst.segment, Regs.CS)
self.assertEqual(a.inst.isSegmentDefault, False)
a = I64("db 0x65\nlodsq")
a.check_reg(0, Regs.RAX, 64)
a.check_simple_deref(1, Regs.RSI, 64)
self.assertEqual(a.inst.segment, Regs.GS)
self.assertEqual(a.inst.isSegmentDefault, False)
def test_stos(self):
a = I16("stosb")
a.check_simple_deref(0, Regs.DI, 8)
a.check_reg(1, Regs.AL, 8)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("stosw")
a.check_simple_deref(0, Regs.EDI, 16)
a.check_reg(1, Regs.AX, 16)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("stosd")
a.check_simple_deref(0, Regs.EDI, 32)
a.check_reg(1, Regs.EAX, 32)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I64("stosq")
a.check_simple_deref(0, Regs.RDI, 64)
a.check_reg(1, Regs.RAX, 64)
self.assertEqual(a.inst.isSegmentDefault, False)
a = I16("db 0x2e\nstosb")
a.check_simple_deref(0, Regs.DI, 8)
a.check_reg(1, Regs.AL, 8)
self.assertEqual(a.inst.unusedPrefixesMask, 1)
self.assertEqual(a.inst.segment, Regs.ES)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("db 0x2e\nstosw")
a.check_simple_deref(0, Regs.EDI, 16)
a.check_reg(1, Regs.AX, 16)
self.assertEqual(a.inst.unusedPrefixesMask, 1)
self.assertEqual(a.inst.segment, Regs.ES)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("db 0x2e\nstosd")
a.check_simple_deref(0, Regs.EDI, 32)
a.check_reg(1, Regs.EAX, 32)
self.assertEqual(a.inst.unusedPrefixesMask, 1)
self.assertEqual(a.inst.segment, Regs.ES)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I64("db 0x65\nstosq")
a.check_simple_deref(0, Regs.RDI, 64)
a.check_reg(1, Regs.RAX, 64)
self.assertEqual(a.inst.unusedPrefixesMask, 1)
self.assertEqual(a.inst.segment, REG_NONE)
def test_scas(self):
a = I16("scasb")
a.check_simple_deref(0, Regs.DI, 8)
a.check_reg(1, Regs.AL, 8)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("scasw")
a.check_simple_deref(0, Regs.EDI, 16)
a.check_reg(1, Regs.AX, 16)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("scasd")
a.check_simple_deref(0, Regs.EDI, 32)
a.check_reg(1, Regs.EAX, 32)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I64("scasq")
a.check_simple_deref(0, Regs.RDI, 64)
a.check_reg(1, Regs.RAX, 64)
self.assertEqual(a.inst.isSegmentDefault, False)
a = I16("db 0x2e\nscasb")
a.check_simple_deref(0, Regs.DI, 8)
a.check_reg(1, Regs.AL, 8)
self.assertEqual(a.inst.unusedPrefixesMask, 1)
self.assertEqual(a.inst.segment, Regs.ES)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("db 0x2e\nscasw")
a.check_simple_deref(0, Regs.EDI, 16)
a.check_reg(1, Regs.AX, 16)
self.assertEqual(a.inst.unusedPrefixesMask, 1)
self.assertEqual(a.inst.segment, Regs.ES)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("db 0x2e\nscasd")
a.check_simple_deref(0, Regs.EDI, 32)
a.check_reg(1, Regs.EAX, 32)
self.assertEqual(a.inst.unusedPrefixesMask, 1)
self.assertEqual(a.inst.segment, Regs.ES)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I64("db 0x65\nscasq")
a.check_simple_deref(0, Regs.RDI, 64)
a.check_reg(1, Regs.RAX, 64)
self.assertEqual(a.inst.unusedPrefixesMask, 1)
self.assertEqual(a.inst.segment, REG_NONE)
def test_cmps(self):
a = I64("cmpsd")
a.check_simple_deref(0, Regs.RSI, 32)
a.check_simple_deref(1, Regs.RDI, 32)
self.assertEqual(a.inst.unusedPrefixesMask, 0)
self.assertEqual(a.inst.segment, REG_NONE)
a = I16("db 0x2e\ncmpsb")
a.check_simple_deref(0, Regs.SI, 8)
a.check_simple_deref(1, Regs.DI, 8)
self.assertEqual(a.inst.unusedPrefixesMask, 0)
self.assertEqual(a.inst.segment, Regs.CS)
self.assertEqual(a.inst.isSegmentDefault, False)
def test_movs(self):
a = I32("movsd")
a.check_simple_deref(0, Regs.EDI, 32)
a.check_simple_deref(1, Regs.ESI, 32)
self.assertEqual(a.inst.unusedPrefixesMask, 0)
self.assertEqual(a.inst.segment, Regs.DS)
self.assertEqual(a.inst.isSegmentDefault, True)
a = I32("db 0x2e\nmovsw")
a.check_simple_deref(0, Regs.EDI, 16)
a.check_simple_deref(1, Regs.ESI, 16)
self.assertEqual(a.inst.unusedPrefixesMask, 0)
self.assertEqual(a.inst.segment, Regs.CS)
self.assertEqual(a.inst.isSegmentDefault, False)
def test_ins(self):
a = I32("db 0x65\ninsw")
a.check_simple_deref(0, Regs.EDI, 16)
a.check_reg(1, Regs.DX, 16)
self.assertEqual(a.inst.unusedPrefixesMask, 1)
self.assertEqual(a.inst.segment, Regs.ES)
self.assertEqual(a.inst.isSegmentDefault, True)
def test_outs(self):
a = I64("db 0x65\noutsd")
a.check_reg(0, Regs.DX, 16)
a.check_simple_deref(1, Regs.RSI, 32)
self.assertEqual(a.inst.segment, Regs.GS)
self.assertEqual(a.inst.isSegmentDefault, False)
def test_branch_hints(self):
self.failIf("FLAG_HINT_TAKEN" not in I32("db 0x3e\n jnz 0x50").inst.flags)
self.failIf("FLAG_HINT_NOT_TAKEN" not in I32("db 0x2e\n jp 0x55").inst.flags)
self.failIf("FLAG_HINT_NOT_TAKEN" not in I32("db 0x2e\n jo 0x55000").inst.flags)
self.failIf(I32("db 0x2e\n loop 0x55").inst.rawFlags & 0x1f, 0)
def test_mnemonic_by_vexw(self):
I32("vmovd xmm1, eax").check_mnemonic("VMOVD")
I64("vmovd xmm1, eax").check_reg(1, Regs.EAX, 32)
a = I64("vmovq xmm1, rax")
a.check_mnemonic("VMOVQ")
a.check_reg(1, Regs.RAX, 64)
def test_vexl_ignored(self):
I32("vaesdeclast xmm1, xmm2, xmm3").check_reg(0, Regs.XMM1, 128)
IB32("c4e26ddfcb").check_mnemonic("VAESDECLAST")
IB64("c4e26ddfcb").check_mnemonic("VAESDECLAST")
def test_vexl_needed(self):
I32("vinsertf128 ymm1, ymm2, xmm4, 0x42").check_mnemonic("VINSERTF128")
IB32("c4e36918cc42").check_invalid() # Without VEX.L.
IB64("c4e36918cc42").check_invalid() # Without VEX.L.
def test_force_reg0(self):
I32("extrq xmm1, 0x55, 0x66").check_mnemonic("EXTRQ")
I64("extrq xmm14, 0x55, 0x66").check_reg(0, Regs.XMM14, 128)
def test_pause(self):
self.assertEqual(I16("pause").inst.size, 2)
self.assertEqual(I32("pause").inst.size, 2)
self.assertEqual(I64("pause").inst.size, 2)
def test_nop(self):
self.assertEqual(I16("db 0x90").inst.size, 1)
self.assertEqual(I32("db 0x90").inst.size, 1)
self.assertEqual(I64("db 0x90").inst.size, 1)
self.assertEqual(I64("db 0x48, 0x90").inst.size, 2)
# XCHG R8D, EAX
a = I64("db 0x41, 0x90")
a.check_reg(0, Regs.R8D, 32)
a.check_reg(1, Regs.EAX, 32)
# XCHG R8, RAX
a = I64("db 0x49, 0x90")
a.check_reg(0, Regs.R8, 64)
a.check_reg(1, Regs.RAX, 64)
a = I64("db 0x4f, 0x90")
a.check_reg(0, Regs.R8, 64)
a.check_reg(1, Regs.RAX, 64)
def test_3dnow(self):
I32("pfadd mm4, [eax]").check_reg(0, Regs.MM4, 64)
I32("pfsub mm5, [eax]").check_reg(0, Regs.MM5, 64)
I32("pfrcpit1 mm1, [ebx]").check_mnemonic("PFRCPIT1")
I64("pavgusb mm1, mm2").check_mnemonic("PAVGUSB")
def test_all_segs(self):
I16("push fs").check_reg(0, Regs.FS, 16)
I16("push gs").check_reg(0, Regs.GS, 16)
I16("push ds").check_reg(0, Regs.DS, 16)
I16("push cs").check_reg(0, Regs.CS, 16)
I16("push ds").check_reg(0, Regs.DS, 16)
I16("push es").check_reg(0, Regs.ES, 16)
def test_op4(self):
a = I32("insertq xmm2, xmm1, 0x55, 0xaa")
a.check_reg(0, Regs.XMM2, 128)
a.check_reg(1, Regs.XMM1, 128)
a.check_type_size(2, distorm3.OPERAND_IMMEDIATE, 8)
self.assertEqual(a.inst.operands[2].value, 0x55)
a.check_type_size(3, distorm3.OPERAND_IMMEDIATE, 8)
self.assertEqual(a.inst.operands[3].value, 0xaa)
def test_pseudo_cmp(self):
I32("cmpps xmm2, xmm3, 0x7")
I64("cmpps xmm2, xmm4, 0x2")
def test_jmp_counters(self):
I16("jcxz 0x100")
I32("jecxz 0x100")
I64("jrcxz 0x100")
def test_natives(self):
self.assertEqual(I16("pusha").inst.size, 1)
self.assertEqual(I16("pushad").inst.size, 2)
self.assertEqual(I32("pusha").inst.size, 1)
self.assertEqual(I32("pushaw").inst.size, 2)
self.assertEqual(I16("CBW").inst.size, 1)
self.assertEqual(I32("CWDE").inst.size, 1)
self.assertEqual(I64("CDQE").inst.size, 2)
def test_modrm_based(self):
I32("movhlps xmm0, xmm1")
I32("movhps xmm0, [eax]")
I64("movhlps xmm0, xmm1")
I64("movhps xmm0, [eax]")
I64("movhlps xmm0, xmm1")
I64("movlps xmm0, [eax]")
def test_wait(self):
self.assertEqual(I16("wait").inst.size, 1)
def test_include_wait(self):
self.assertEqual(I16("db 0x9b\n db 0xd9\n db 0x30").inst.size, 3)
def test_loopxx_counters_size(self):
a = I16("loopz 0x50")
a.check_type_size(0,distorm3.OPERAND_IMMEDIATE, 8)
a.check_addr_size(16)
a = I32("loopz 0x50")
a.check_type_size(0,distorm3.OPERAND_IMMEDIATE, 8)
a.check_addr_size(32)
a = I64("loopz 0x50")
a.check_type_size(0,distorm3.OPERAND_IMMEDIATE, 8)
a.check_addr_size(64)
a = I16("db 0x67\n loopz 0x50")
a.check_type_size(0,distorm3.OPERAND_IMMEDIATE, 8)
a.check_addr_size(32)
a = I32("db 0x67\n loopz 0x50")
a.check_type_size(0,distorm3.OPERAND_IMMEDIATE, 8)
a.check_addr_size(16)
a = I64("db 0x67\n loopnz 0x50")
a.check_type_size(0,distorm3.OPERAND_IMMEDIATE, 8)
a.check_addr_size(32)
class TestPrefixes(unittest.TestCase):
Derefs16 = ["BX + SI", "BX + DI", "BP + SI", "BP + DI", "SI", "DI", "BP", "BX"]
Derefs32 = ["EAX", "ECX", "EDX", "EBX", "EBP", "ESI", "EDI"]
Bases = ["EAX", "ECX", "EDX", "EBX", "ESP", "ESI", "EDI"]
def test_without_seg(self):
self.assertEqual(I64("and [rip+0X5247], ch").inst.segment, REG_NONE)
self.assertEqual(I32("mov eax, [ebp*4]").inst.segment, Regs.DS)
self.assertEqual(I32("mov eax, [eax*4+ebp]").inst.segment, Regs.SS)
def test_default_seg16(self):
a = I16("mov [ds:0x1234], ax")
self.assertEqual(a.inst.segment, Regs.DS)
self.assertEqual(a.inst.isSegmentDefault, 1)
a = I16("mov [cs:0x1234], ax")
self.assertEqual(a.inst.segment, Regs.CS)
self.assertEqual(a.inst.isSegmentDefault, False)
def test_default_seg16_all(self):
for i in ["ADD [ds:%s], AX" % i for i in self.Derefs16]:
a = I16(i)
self.assertEqual(a.inst.segment, Regs.DS)
if i[8:10] == "BP":
self.assertEqual(a.inst.isSegmentDefault, False)
else:
self.assertEqual(a.inst.isSegmentDefault, True)
# Test with disp8
for i in ["ADD [ds:%s + 0x55], AX" % i for i in self.Derefs16]:
a = I16(i)
self.assertEqual(a.inst.segment, Regs.DS)
if i[8:10] == "BP":
self.assertEqual(a.inst.isSegmentDefault, False)
else:
self.assertEqual(a.inst.isSegmentDefault, True)
def test_default_seg32(self):
self.assertEqual(I32("mov [ds:0x12345678], eax").inst.segment, Regs.DS)
self.assertEqual(I32("mov [cs:0x12345678], eax").inst.segment, Regs.CS)
texts = ["ADD [ds:%s], EAX" % i for i in self.Derefs32]
for i in enumerate(texts):
a = I32(i[1])
self.assertEqual(a.inst.segment, Regs.DS)
if self.Derefs32[i[0]] == "EBP":
self.assertEqual(a.inst.isSegmentDefault, False)
else:
self.assertEqual(a.inst.isSegmentDefault, True)
# Test with disp8
texts = ["ADD [ds:%s + 0x55], EAX" % i for i in self.Derefs32]
for i in enumerate(texts):
a = I32(i[1])
self.assertEqual(a.inst.segment, Regs.DS)
if self.Derefs32[i[0]] == "EBP":
self.assertEqual(a.inst.isSegmentDefault, False)
else:
self.assertEqual(a.inst.isSegmentDefault, True)
def test_sib(self):
for i in enumerate(self.Derefs32):
for j in enumerate(self.Bases):
for s in [1, 2, 4, 8]:
a = I32("cmp ebp, [ds:%s*%d + %s]" % (i[1], s, j[1]))
a2 = I32("cmp ebp, [ds:%s*%d + %s + 0x55]" % (i[1], s, j[1]))
self.assertEqual(a.inst.segment, Regs.DS)
self.assertEqual(a2.inst.segment, Regs.DS)
if (j[1] == "EBP" or j[1] == "ESP"):
self.assertEqual(a.inst.isSegmentDefault, False)
self.assertEqual(a2.inst.isSegmentDefault, False)
else:
self.assertEqual(a.inst.isSegmentDefault, True)
self.assertEqual(a2.inst.isSegmentDefault, True)
def test_seg64(self):
self.assertEqual(I64("mov [gs:rip+0x12345678], eax").inst.segment, Regs.GS)
self.assertEqual(I64("mov [fs:0x12345678], eax").inst.segment, Regs.FS)
def test_lock(self):
self.failIf("FLAG_LOCK" not in I32("lock inc dword [eax]").inst.flags)
def test_repnz(self):
self.failIf("FLAG_REPNZ" not in I32("repnz scasb").inst.flags)
def test_rep(self):
self.failIf("FLAG_REP" not in I32("rep movsb").inst.flags)
def test_segment_override(self):
self.assertEqual(I32("mov eax, [cs:eax]").inst.segment, Regs.CS)
self.assertEqual(I32("mov eax, [ds:eax]").inst.segment, Regs.DS)
self.assertEqual(I32("mov eax, [es:eax]").inst.segment, Regs.ES)
self.assertEqual(I32("mov eax, [ss:eax]").inst.segment, Regs.SS)
self.assertEqual(I32("mov eax, [fs:eax]").inst.segment, Regs.FS)
self.assertEqual(I32("mov eax, [gs:eax]").inst.segment, Regs.GS)
def test_unused_normal(self):
self.assertEqual(IB64("4090").inst.unusedPrefixesMask, 1)
self.assertEqual(IB64("6790").inst.unusedPrefixesMask, 1)
self.assertEqual(IB64("6690").inst.unusedPrefixesMask, 1)
self.assertEqual(IB64("f290").inst.unusedPrefixesMask, 1)
self.assertEqual(IB64("f090").inst.unusedPrefixesMask, 1)
self.assertEqual(IB64("f3c3").inst.unusedPrefixesMask, 1)
self.assertEqual(IB64("64c3").inst.unusedPrefixesMask, 1)
def test_unused_doubles(self):
self.assertEqual(IB64("404090").inst.unusedPrefixesMask, 3)
self.assertEqual(IB64("676790").inst.unusedPrefixesMask, 3)
self.assertEqual(IB64("666690").inst.unusedPrefixesMask, 3)
self.assertEqual(IB64("f2f290").inst.unusedPrefixesMask, 3)
self.assertEqual(IB64("f0f090").inst.unusedPrefixesMask, 3)
self.assertEqual(IB64("f3f3c3").inst.unusedPrefixesMask, 3)
self.assertEqual(IB64("642ec3").inst.unusedPrefixesMask, 3)
def test_unused_sequences(self):
self.assertEqual(len(IB64("66"*15).insts), 15)
r = int(random.random() * 14)
self.assertEqual(IB64("66"*r + "90").inst.unusedPrefixesMask, (1 << r) - 1)
def test_rexw_66(self):
self.assertEqual(IB64("6648ffc0").inst.unusedPrefixesMask, 1)
self.assertEqual(IB64("6640ffc0").inst.unusedPrefixesMask, 2)
self.assertEqual(IB64("48660f10c0").inst.unusedPrefixesMask, 1)
self.assertEqual(IB64("664f0f10c0").inst.unusedPrefixesMask, 0)
class TestInvalid(unittest.TestCase):
def align(self):
for i in xrange(15):
IB32("90")
def test_filter_mem(self):
#cmpxchg8b eax
IB32("0fc7c8")
self.align()
def test_drop_prefixes(self):
        # Drop prefixes when we encounter an instruction that couldn't be decoded.
IB32("666764ffff")
self.align()
def test_zzz_must_be_last_drop_prefixes(self):
# Drop prefixes when the last byte in stream is a prefix.
IB32("66")
class FlowControl:
""" The flow control instruction will be flagged in the lo nibble of the 'meta' field in _InstInfo of diStorm.
They are used to distinguish between flow control instructions (such as: ret, call, jmp, jz, etc) to normal ones. """
(CALL,
RET,
SYS,
BRANCH,
COND_BRANCH,
INT) = range(1, 7)
DF_MAXIMUM_ADDR16 = 1
DF_MAXIMUM_ADDR32 = 2
DF_RETURN_FC_ONLY = 4
DF_STOP_ON_CALL = 8
DF_STOP_ON_RET = 0x10
DF_STOP_ON_SYS = 0x20
DF_STOP_ON_BRANCH = 0x40
DF_STOP_ON_COND_BRANCH = 0x80
DF_STOP_ON_INT = 0x100
DF_STOP_ON_FLOW_CONTROL = (DF_STOP_ON_CALL | DF_STOP_ON_RET | DF_STOP_ON_SYS | DF_STOP_ON_BRANCH | DF_STOP_ON_COND_BRANCH | DF_STOP_ON_INT)
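# Hedged helper sketch (not part of the original suite): TestFeatures.test_fc
# below reads the flow-control class out of the low bits of a decomposed
# instruction's 'meta' field. The 0x7 mask mirrors that test; treating it as
# the whole "lo nibble" mentioned above is an assumption, not documented API.
def flow_control_of(inst):
    """Return the FlowControl class encoded in an instruction's 'meta' field."""
    return inst["meta"] & 7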
class TestFeatures(unittest.TestCase):
def test_addr16(self):
#I16("mov [-4], bx", 0, DF_MAXIMUM_ADDR16).check_disp(0, 0xfffc, 16, 16)
pass
def test_add32(self):
pass
def test_fc(self):
pairs = [
(["INT 5", "db 0xf1", "INT 3", "INTO", "UD2"], FlowControl.INT),
(["CALL 0x50", "CALL FAR [ebx]"], FlowControl.CALL),
(["RET", "IRET", "RETF"], FlowControl.RET),
(["SYSCALL", "SYSENTER", "SYSRET", "SYSEXIT"], FlowControl.SYS),
(["JMP 0x50", "JMP FAR [ebx]"], FlowControl.BRANCH),
(["JCXZ 0x50", "JO 0x50", "JNO 0x50", "JB 0x50", "JAE 0x50",
"JZ 0x50", "JNZ 0x50", "JBE 0x50", "JA 0x50", "JS 0x50",
"JNS 0x50", "JP 0x50", "JNP 0x50", "JL 0x50", "JGE 0x50",
"JLE 0x50", "JG 0x50", "LOOP 0x50", "LOOPZ 0x50", "LOOPNZ 0x50"], FlowControl.COND_BRANCH)
]
for i in pairs:
for j in i[0]:
a = I32(j + "\nnop", DF_STOP_ON_FLOW_CONTROL)
self.assertEqual(len(a.insts), 1)
self.assertEqual(a.inst["meta"] & 7, i[1])
a = I32("push eax\nnop\n" + j, DF_RETURN_FC_ONLY)
self.assertEqual(len(a.insts), 1)
a = I32("nop\nxor eax, eax\n" + j + "\ninc eax", DF_RETURN_FC_ONLY | DF_STOP_ON_FLOW_CONTROL)
self.assertEqual(len(a.insts), 1)
def test_filter(self):
pass
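# Hedged sketch of how the DF_* feature flags above would reach diStorm,
# assuming the distorm3 binding's Decompose(offset, code, type, features)
# signature (which the I32/I64 helpers presumably wrap):
def decompose_with_features(code, features=0):
    # Decode a raw byte string in 32-bit mode, honoring decomposer features
    # such as DF_STOP_ON_FLOW_CONTROL or DF_RETURN_FC_ONLY.
    return distorm3.Decompose(0, code, distorm3.Decode32Bits, features)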
def GetNewSuite(className):
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(className))
return suite
def initfiles():
for i in ["bin16", "bin32", "bin64"]:
fbin.append(open("build\\linux\\"+i, "wb"))
if __name__ == "__main__":
random.seed()
#initfiles() # Used to emit the bytes of the tests - useful for code coverage input.
suite = unittest.TestSuite()
suite.addTest(GetNewSuite(TestMode16))
suite.addTest(GetNewSuite(TestMode32))
suite.addTest(GetNewSuite(TestMode64))
suite.addTest(GetNewSuite(TestInstTable))
suite.addTest(GetNewSuite(TestAVXOperands))
suite.addTest(GetNewSuite(TestMisc))
suite.addTest(GetNewSuite(TestPrefixes))
#suite.addTest(GetNewSuite(TestInvalid))
#suite.addTest(GetNewSuite(TestFeatures))
unittest.TextTestRunner(verbosity=1).run(suite)
minhphung171093/GreenERP_V9 | openerp/addons/sale_service/__openerp__.py
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Create Tasks from SO',
'version': '1.0',
'category': 'Project Management',
'description': """
Automatically creates project tasks from procurement lines.
===========================================================
This module will automatically create a new task for each procurement order line
(e.g. for sale order lines) whose product has the following characteristics:
* Product Type = Service
* Create Task Automatically = True
If on top of that a project is specified on the product form (in the Procurement
tab), then the new task will be created in that specific project. Otherwise, the
new task will not belong to any project, and may be added to a project manually
later.
When the project task is completed or cancelled, the corresponding procurement
is updated accordingly. For example, if this procurement corresponds to a sale
order line, the sale order line will be considered delivered when the task is
completed.
""",
'website': 'https://www.odoo.com/page/crm',
'depends': ['project', 'sale', 'project_timesheet', 'sale_timesheet'],
'data': ['views/sale_service_view.xml'],
'demo': ['demo/sale_service_demo.xml'],
'installable': True,
'auto_install': True,
}
| gpl-3.0 | -5,719,672,554,160,219,000 | 36.666667 | 80 | 0.688791 | false |
fidomason/kbengine | kbe/src/lib/python/Lib/encodings/cp037.py | 266 | 13121 | """ Python Character Mapping Codec cp037 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP037.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp037',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x9c' # 0x04 -> CONTROL
'\t' # 0x05 -> HORIZONTAL TABULATION
'\x86' # 0x06 -> CONTROL
'\x7f' # 0x07 -> DELETE
'\x97' # 0x08 -> CONTROL
'\x8d' # 0x09 -> CONTROL
'\x8e' # 0x0A -> CONTROL
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x9d' # 0x14 -> CONTROL
'\x85' # 0x15 -> CONTROL
'\x08' # 0x16 -> BACKSPACE
'\x87' # 0x17 -> CONTROL
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x92' # 0x1A -> CONTROL
'\x8f' # 0x1B -> CONTROL
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
'\x80' # 0x20 -> CONTROL
'\x81' # 0x21 -> CONTROL
'\x82' # 0x22 -> CONTROL
'\x83' # 0x23 -> CONTROL
'\x84' # 0x24 -> CONTROL
'\n' # 0x25 -> LINE FEED
'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
'\x1b' # 0x27 -> ESCAPE
'\x88' # 0x28 -> CONTROL
'\x89' # 0x29 -> CONTROL
'\x8a' # 0x2A -> CONTROL
'\x8b' # 0x2B -> CONTROL
'\x8c' # 0x2C -> CONTROL
'\x05' # 0x2D -> ENQUIRY
'\x06' # 0x2E -> ACKNOWLEDGE
'\x07' # 0x2F -> BELL
'\x90' # 0x30 -> CONTROL
'\x91' # 0x31 -> CONTROL
'\x16' # 0x32 -> SYNCHRONOUS IDLE
'\x93' # 0x33 -> CONTROL
'\x94' # 0x34 -> CONTROL
'\x95' # 0x35 -> CONTROL
'\x96' # 0x36 -> CONTROL
'\x04' # 0x37 -> END OF TRANSMISSION
'\x98' # 0x38 -> CONTROL
'\x99' # 0x39 -> CONTROL
'\x9a' # 0x3A -> CONTROL
'\x9b' # 0x3B -> CONTROL
'\x14' # 0x3C -> DEVICE CONTROL FOUR
'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
'\x9e' # 0x3E -> CONTROL
'\x1a' # 0x3F -> SUBSTITUTE
' ' # 0x40 -> SPACE
'\xa0' # 0x41 -> NO-BREAK SPACE
'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE
'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE
'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA
'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE
'\xa2' # 0x4A -> CENT SIGN
'.' # 0x4B -> FULL STOP
'<' # 0x4C -> LESS-THAN SIGN
'(' # 0x4D -> LEFT PARENTHESIS
'+' # 0x4E -> PLUS SIGN
'|' # 0x4F -> VERTICAL LINE
'&' # 0x50 -> AMPERSAND
'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE
'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS
'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE
'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN)
'!' # 0x5A -> EXCLAMATION MARK
'$' # 0x5B -> DOLLAR SIGN
'*' # 0x5C -> ASTERISK
')' # 0x5D -> RIGHT PARENTHESIS
';' # 0x5E -> SEMICOLON
'\xac' # 0x5F -> NOT SIGN
'-' # 0x60 -> HYPHEN-MINUS
'/' # 0x61 -> SOLIDUS
'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE
'\xa6' # 0x6A -> BROKEN BAR
',' # 0x6B -> COMMA
'%' # 0x6C -> PERCENT SIGN
'_' # 0x6D -> LOW LINE
'>' # 0x6E -> GREATER-THAN SIGN
'?' # 0x6F -> QUESTION MARK
'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE
'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE
'`' # 0x79 -> GRAVE ACCENT
':' # 0x7A -> COLON
'#' # 0x7B -> NUMBER SIGN
'@' # 0x7C -> COMMERCIAL AT
"'" # 0x7D -> APOSTROPHE
'=' # 0x7E -> EQUALS SIGN
'"' # 0x7F -> QUOTATION MARK
'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE
'a' # 0x81 -> LATIN SMALL LETTER A
'b' # 0x82 -> LATIN SMALL LETTER B
'c' # 0x83 -> LATIN SMALL LETTER C
'd' # 0x84 -> LATIN SMALL LETTER D
'e' # 0x85 -> LATIN SMALL LETTER E
'f' # 0x86 -> LATIN SMALL LETTER F
'g' # 0x87 -> LATIN SMALL LETTER G
'h' # 0x88 -> LATIN SMALL LETTER H
'i' # 0x89 -> LATIN SMALL LETTER I
'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC)
'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE
'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC)
'\xb1' # 0x8F -> PLUS-MINUS SIGN
'\xb0' # 0x90 -> DEGREE SIGN
'j' # 0x91 -> LATIN SMALL LETTER J
'k' # 0x92 -> LATIN SMALL LETTER K
'l' # 0x93 -> LATIN SMALL LETTER L
'm' # 0x94 -> LATIN SMALL LETTER M
'n' # 0x95 -> LATIN SMALL LETTER N
'o' # 0x96 -> LATIN SMALL LETTER O
'p' # 0x97 -> LATIN SMALL LETTER P
'q' # 0x98 -> LATIN SMALL LETTER Q
'r' # 0x99 -> LATIN SMALL LETTER R
'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR
'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR
'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE
'\xb8' # 0x9D -> CEDILLA
'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE
'\xa4' # 0x9F -> CURRENCY SIGN
'\xb5' # 0xA0 -> MICRO SIGN
'~' # 0xA1 -> TILDE
's' # 0xA2 -> LATIN SMALL LETTER S
't' # 0xA3 -> LATIN SMALL LETTER T
'u' # 0xA4 -> LATIN SMALL LETTER U
'v' # 0xA5 -> LATIN SMALL LETTER V
'w' # 0xA6 -> LATIN SMALL LETTER W
'x' # 0xA7 -> LATIN SMALL LETTER X
'y' # 0xA8 -> LATIN SMALL LETTER Y
'z' # 0xA9 -> LATIN SMALL LETTER Z
'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK
'\xbf' # 0xAB -> INVERTED QUESTION MARK
'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC)
'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC)
'\xae' # 0xAF -> REGISTERED SIGN
'^' # 0xB0 -> CIRCUMFLEX ACCENT
'\xa3' # 0xB1 -> POUND SIGN
'\xa5' # 0xB2 -> YEN SIGN
'\xb7' # 0xB3 -> MIDDLE DOT
'\xa9' # 0xB4 -> COPYRIGHT SIGN
'\xa7' # 0xB5 -> SECTION SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
'[' # 0xBA -> LEFT SQUARE BRACKET
']' # 0xBB -> RIGHT SQUARE BRACKET
'\xaf' # 0xBC -> MACRON
'\xa8' # 0xBD -> DIAERESIS
'\xb4' # 0xBE -> ACUTE ACCENT
'\xd7' # 0xBF -> MULTIPLICATION SIGN
'{' # 0xC0 -> LEFT CURLY BRACKET
'A' # 0xC1 -> LATIN CAPITAL LETTER A
'B' # 0xC2 -> LATIN CAPITAL LETTER B
'C' # 0xC3 -> LATIN CAPITAL LETTER C
'D' # 0xC4 -> LATIN CAPITAL LETTER D
'E' # 0xC5 -> LATIN CAPITAL LETTER E
'F' # 0xC6 -> LATIN CAPITAL LETTER F
'G' # 0xC7 -> LATIN CAPITAL LETTER G
'H' # 0xC8 -> LATIN CAPITAL LETTER H
'I' # 0xC9 -> LATIN CAPITAL LETTER I
'\xad' # 0xCA -> SOFT HYPHEN
'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE
'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE
'}' # 0xD0 -> RIGHT CURLY BRACKET
'J' # 0xD1 -> LATIN CAPITAL LETTER J
'K' # 0xD2 -> LATIN CAPITAL LETTER K
'L' # 0xD3 -> LATIN CAPITAL LETTER L
'M' # 0xD4 -> LATIN CAPITAL LETTER M
'N' # 0xD5 -> LATIN CAPITAL LETTER N
'O' # 0xD6 -> LATIN CAPITAL LETTER O
'P' # 0xD7 -> LATIN CAPITAL LETTER P
'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
'R' # 0xD9 -> LATIN CAPITAL LETTER R
'\xb9' # 0xDA -> SUPERSCRIPT ONE
'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE
'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS
'\\' # 0xE0 -> REVERSE SOLIDUS
'\xf7' # 0xE1 -> DIVISION SIGN
'S' # 0xE2 -> LATIN CAPITAL LETTER S
'T' # 0xE3 -> LATIN CAPITAL LETTER T
'U' # 0xE4 -> LATIN CAPITAL LETTER U
'V' # 0xE5 -> LATIN CAPITAL LETTER V
'W' # 0xE6 -> LATIN CAPITAL LETTER W
'X' # 0xE7 -> LATIN CAPITAL LETTER X
'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
'\xb2' # 0xEA -> SUPERSCRIPT TWO
'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE
'0' # 0xF0 -> DIGIT ZERO
'1' # 0xF1 -> DIGIT ONE
'2' # 0xF2 -> DIGIT TWO
'3' # 0xF3 -> DIGIT THREE
'4' # 0xF4 -> DIGIT FOUR
'5' # 0xF5 -> DIGIT FIVE
'6' # 0xF6 -> DIGIT SIX
'7' # 0xF7 -> DIGIT SEVEN
'8' # 0xF8 -> DIGIT EIGHT
'9' # 0xF9 -> DIGIT NINE
'\xb3' # 0xFA -> SUPERSCRIPT THREE
'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE
'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
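# Usage sketch (illustrative addition, not part of the gencodec output): the
# tables above plug directly into the charmap codec primitives.
if __name__ == '__main__':
    encoded, _ = codecs.charmap_encode('HELLO', 'strict', encoding_table)
    decoded, _ = codecs.charmap_decode(encoded, 'strict', decoding_table)
    assert decoded == 'HELLO'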
| lgpl-3.0 | 7,037,566,738,572,221,000 | 41.739414 | 116 | 0.519092 | false |
luistorresm/odoo | addons/email_template/email_template.py | 196 | 30189 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import datetime
import dateutil.relativedelta as relativedelta
import logging
import lxml
import urlparse
import openerp
from openerp import SUPERUSER_ID
from openerp.osv import osv, fields
from openerp import tools, api
from openerp.tools.translate import _
from urllib import urlencode, quote as quote
_logger = logging.getLogger(__name__)
def format_tz(pool, cr, uid, dt, tz=False, format=False, context=None):
context = dict(context or {})
if tz:
context['tz'] = tz or pool.get('res.users').read(cr, SUPERUSER_ID, uid, ['tz'])['tz'] or "UTC"
timestamp = datetime.datetime.strptime(dt, tools.DEFAULT_SERVER_DATETIME_FORMAT)
ts = fields.datetime.context_timestamp(cr, uid, timestamp, context)
if format:
return ts.strftime(format)
else:
lang = context.get("lang")
lang_params = {}
if lang:
res_lang = pool.get('res.lang')
ids = res_lang.search(cr, uid, [("code", "=", lang)])
if ids:
lang_params = res_lang.read(cr, uid, ids[0], ["date_format", "time_format"])
format_date = lang_params.get("date_format", '%B-%d-%Y')
format_time = lang_params.get("time_format", '%I-%M %p')
fdate = ts.strftime(format_date)
ftime = ts.strftime(format_time)
return "%s %s%s" % (fdate, ftime, (' (%s)' % tz) if tz else '')
try:
# We use a jinja2 sandboxed environment to render mako templates.
# Note that the rendering does not cover all the mako syntax, in particular
# arbitrary Python statements are not accepted, and not all expressions are
# allowed: only "public" attributes (not starting with '_') of objects may
# be accessed.
# This is done on purpose: it prevents incidental or malicious execution of
# Python code that may break the security of the server.
from jinja2.sandbox import SandboxedEnvironment
mako_template_env = SandboxedEnvironment(
block_start_string="<%",
block_end_string="%>",
variable_start_string="${",
variable_end_string="}",
comment_start_string="<%doc>",
comment_end_string="</%doc>",
line_statement_prefix="%",
line_comment_prefix="##",
trim_blocks=True, # do not output newline after blocks
autoescape=True, # XML/HTML automatic escaping
)
mako_template_env.globals.update({
'str': str,
'quote': quote,
'urlencode': urlencode,
'datetime': datetime,
'len': len,
'abs': abs,
'min': min,
'max': max,
'sum': sum,
'filter': filter,
'reduce': reduce,
'map': map,
'round': round,
    # dateutil.relativedelta is an old-style class and cannot be directly
    # instantiated within a jinja2 expression, so a lambda "proxy" is
    # needed.
'relativedelta': lambda *a, **kw : relativedelta.relativedelta(*a, **kw),
})
except ImportError:
_logger.warning("jinja2 not available, templating features will not work!")
class email_template(osv.osv):
"Templates for sending email"
_name = "email.template"
_description = 'Email Templates'
_order = 'name'
def default_get(self, cr, uid, fields, context=None):
res = super(email_template, self).default_get(cr, uid, fields, context)
if res.get('model'):
res['model_id'] = self.pool['ir.model'].search(cr, uid, [('model', '=', res.pop('model'))], context=context)[0]
return res
def _replace_local_links(self, cr, uid, html, context=None):
""" Post-processing of html content to replace local links to absolute
links, using web.base.url as base url. """
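        # e.g. assuming web.base.url = 'https://erp.example.com':
        #   <img src="/web/image/12"> -> <img src="https://erp.example.com/web/image/12">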
if not html:
return html
# form a tree
root = lxml.html.fromstring(html)
if not len(root) and root.text is None and root.tail is None:
html = '<div>%s</div>' % html
root = lxml.html.fromstring(html)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
(base_scheme, base_netloc, bpath, bparams, bquery, bfragment) = urlparse.urlparse(base_url)
def _process_link(url):
new_url = url
(scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
if not scheme and not netloc:
new_url = urlparse.urlunparse((base_scheme, base_netloc, path, params, query, fragment))
return new_url
# check all nodes, replace :
# - img src -> check URL
# - a href -> check URL
for node in root.iter():
if node.tag == 'a' and node.get('href'):
node.set('href', _process_link(node.get('href')))
elif node.tag == 'img' and not node.get('src', 'data').startswith('data'):
node.set('src', _process_link(node.get('src')))
html = lxml.html.tostring(root, pretty_print=False, method='html')
        # this is ugly, but lxml/etree tostring wants to put everything in a 'div' that breaks the editor -> remove it
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html
def render_post_process(self, cr, uid, html, context=None):
html = self._replace_local_links(cr, uid, html, context=context)
return html
def render_template_batch(self, cr, uid, template, model, res_ids, context=None, post_process=False):
"""Render the given template text, replace mako expressions ``${expr}``
with the result of evaluating these expressions with
an evaluation context containing:
* ``user``: browse_record of the current user
* ``object``: browse_record of the document record this mail is
related to
* ``context``: the context passed to the mail composition wizard
:param str template: the template text to render
:param str model: model name of the document record this mail is related to.
        :param list res_ids: ids of the document records these mails are related to.
"""
if context is None:
context = {}
res_ids = filter(None, res_ids) # to avoid browsing [None] below
results = dict.fromkeys(res_ids, u"")
# try to load the template
try:
template = mako_template_env.from_string(tools.ustr(template))
except Exception:
_logger.exception("Failed to load template %r", template)
return results
# prepare template variables
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
records = self.pool[model].browse(cr, uid, res_ids, context=context) or [None]
variables = {
'format_tz': lambda dt, tz=False, format=False, context=context: format_tz(self.pool, cr, uid, dt, tz, format, context),
'user': user,
'ctx': context, # context kw would clash with mako internals
}
for record in records:
res_id = record.id if record else None
variables['object'] = record
try:
render_result = template.render(variables)
except Exception:
_logger.exception("Failed to render template %r using values %r" % (template, variables))
render_result = u""
if render_result == u"False":
render_result = u""
results[res_id] = render_result
if post_process:
for res_id, result in results.iteritems():
results[res_id] = self.render_post_process(cr, uid, result, context=context)
return results
def get_email_template_batch(self, cr, uid, template_id=False, res_ids=None, context=None):
if context is None:
context = {}
if res_ids is None:
res_ids = [None]
results = dict.fromkeys(res_ids, False)
if not template_id:
return results
template = self.browse(cr, uid, template_id, context)
langs = self.render_template_batch(cr, uid, template.lang, template.model, res_ids, context)
for res_id, lang in langs.iteritems():
if lang:
# Use translated template if necessary
ctx = context.copy()
ctx['lang'] = lang
template = self.browse(cr, uid, template.id, ctx)
else:
template = self.browse(cr, uid, int(template_id), context)
results[res_id] = template
return results
def onchange_model_id(self, cr, uid, ids, model_id, context=None):
mod_name = False
if model_id:
mod_name = self.pool.get('ir.model').browse(cr, uid, model_id, context).model
return {'value': {'model': mod_name}}
_columns = {
'name': fields.char('Name'),
        'model_id': fields.many2one('ir.model', 'Applies to', help="The kind of document with which this template can be used"),
'model': fields.related('model_id', 'model', type='char', string='Related Document Model',
select=True, store=True, readonly=True),
'lang': fields.char('Language',
help="Optional translation language (ISO code) to select when sending out an email. "
"If not set, the english version will be used. "
"This should usually be a placeholder expression "
"that provides the appropriate language, e.g. "
"${object.partner_id.lang}.",
placeholder="${object.partner_id.lang}"),
'user_signature': fields.boolean('Add Signature',
help="If checked, the user's signature will be appended to the text version "
"of the message"),
'subject': fields.char('Subject', translate=True, help="Subject (placeholders may be used here)",),
'email_from': fields.char('From',
help="Sender address (placeholders may be used here). If not set, the default "
"value will be the author's email alias if configured, or email address."),
'use_default_to': fields.boolean(
'Default recipients',
help="Default recipients of the record:\n"
"- partner (using id on a partner or the partner_id field) OR\n"
"- email (using email_from or email field)"),
'email_to': fields.char('To (Emails)', help="Comma-separated recipient addresses (placeholders may be used here)"),
'partner_to': fields.char('To (Partners)',
help="Comma-separated ids of recipient partners (placeholders may be used here)",
oldname='email_recipients'),
'email_cc': fields.char('Cc', help="Carbon copy recipients (placeholders may be used here)"),
'reply_to': fields.char('Reply-To', help="Preferred response address (placeholders may be used here)"),
'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing Mail Server', readonly=False,
help="Optional preferred server for outgoing mails. If not set, the highest "
"priority one will be used."),
'body_html': fields.html('Body', translate=True, sanitize=False, help="Rich-text/HTML version of the message (placeholders may be used here)"),
'report_name': fields.char('Report Filename', translate=True,
help="Name to use for the generated report file (may contain placeholders)\n"
"The extension can be omitted and will then come from the report type."),
'report_template': fields.many2one('ir.actions.report.xml', 'Optional report to print and attach'),
'ref_ir_act_window': fields.many2one('ir.actions.act_window', 'Sidebar action', readonly=True, copy=False,
help="Sidebar action to make this template available on records "
"of the related document model"),
'ref_ir_value': fields.many2one('ir.values', 'Sidebar Button', readonly=True, copy=False,
help="Sidebar button to open the sidebar action"),
'attachment_ids': fields.many2many('ir.attachment', 'email_template_attachment_rel', 'email_template_id',
'attachment_id', 'Attachments',
help="You may attach files to this template, to be added to all "
"emails created from this template"),
'auto_delete': fields.boolean('Auto Delete', help="Permanently delete this email after sending it, to save space"),
# Fake fields used to implement the placeholder assistant
'model_object_field': fields.many2one('ir.model.fields', string="Field",
help="Select target field from the related document model.\n"
"If it is a relationship field you will be able to select "
"a target field at the destination of the relationship."),
'sub_object': fields.many2one('ir.model', 'Sub-model', readonly=True,
help="When a relationship field is selected as first field, "
"this field shows the document model the relationship goes to."),
'sub_model_object_field': fields.many2one('ir.model.fields', 'Sub-field',
help="When a relationship field is selected as first field, "
"this field lets you select the target field within the "
"destination document model (sub-model)."),
'null_value': fields.char('Default Value', help="Optional value to use if the target field is empty"),
'copyvalue': fields.char('Placeholder Expression', help="Final placeholder expression, to be copy-pasted in the desired template field."),
}
_defaults = {
'auto_delete': True,
}
def create_action(self, cr, uid, ids, context=None):
action_obj = self.pool.get('ir.actions.act_window')
data_obj = self.pool.get('ir.model.data')
for template in self.browse(cr, uid, ids, context=context):
src_obj = template.model_id.model
model_data_id = data_obj._get_id(cr, uid, 'mail', 'email_compose_message_wizard_form')
res_id = data_obj.browse(cr, uid, model_data_id, context=context).res_id
button_name = _('Send Mail (%s)') % template.name
act_id = action_obj.create(cr, SUPERUSER_ID, {
'name': button_name,
'type': 'ir.actions.act_window',
'res_model': 'mail.compose.message',
'src_model': src_obj,
'view_type': 'form',
'context': "{'default_composition_mode': 'mass_mail', 'default_template_id' : %d, 'default_use_template': True}" % (template.id),
'view_mode':'form,tree',
'view_id': res_id,
'target': 'new',
'auto_refresh':1
}, context)
ir_values_id = self.pool.get('ir.values').create(cr, SUPERUSER_ID, {
'name': button_name,
'model': src_obj,
'key2': 'client_action_multi',
'value': "ir.actions.act_window,%s" % act_id,
'object': True,
}, context)
template.write({
'ref_ir_act_window': act_id,
'ref_ir_value': ir_values_id,
})
return True
def unlink_action(self, cr, uid, ids, context=None):
for template in self.browse(cr, uid, ids, context=context):
try:
if template.ref_ir_act_window:
self.pool.get('ir.actions.act_window').unlink(cr, SUPERUSER_ID, template.ref_ir_act_window.id, context)
if template.ref_ir_value:
ir_values_obj = self.pool.get('ir.values')
ir_values_obj.unlink(cr, SUPERUSER_ID, template.ref_ir_value.id, context)
except Exception:
raise osv.except_osv(_("Warning"), _("Deletion of the action record failed."))
return True
def unlink(self, cr, uid, ids, context=None):
self.unlink_action(cr, uid, ids, context=context)
return super(email_template, self).unlink(cr, uid, ids, context=context)
def copy(self, cr, uid, id, default=None, context=None):
template = self.browse(cr, uid, id, context=context)
default = dict(default or {},
name=_("%s (copy)") % template.name)
return super(email_template, self).copy(cr, uid, id, default, context)
def build_expression(self, field_name, sub_field_name, null_value):
"""Returns a placeholder expression for use in a template field,
based on the values provided in the placeholder assistant.
:param field_name: main field name
:param sub_field_name: sub field name (M2O)
:param null_value: default value if the target value is empty
:return: final placeholder expression
"""
expression = ''
if field_name:
expression = "${object." + field_name
if sub_field_name:
expression += "." + sub_field_name
if null_value:
expression += " or '''%s'''" % null_value
expression += "}"
return expression
def onchange_sub_model_object_value_field(self, cr, uid, ids, model_object_field, sub_model_object_field=False, null_value=None, context=None):
result = {
'sub_object': False,
'copyvalue': False,
'sub_model_object_field': False,
'null_value': False
}
if model_object_field:
fields_obj = self.pool.get('ir.model.fields')
field_value = fields_obj.browse(cr, uid, model_object_field, context)
if field_value.ttype in ['many2one', 'one2many', 'many2many']:
res_ids = self.pool.get('ir.model').search(cr, uid, [('model', '=', field_value.relation)], context=context)
sub_field_value = False
if sub_model_object_field:
sub_field_value = fields_obj.browse(cr, uid, sub_model_object_field, context)
if res_ids:
result.update({
'sub_object': res_ids[0],
'copyvalue': self.build_expression(field_value.name, sub_field_value and sub_field_value.name or False, null_value or False),
'sub_model_object_field': sub_model_object_field or False,
'null_value': null_value or False
})
else:
result.update({
'copyvalue': self.build_expression(field_value.name, False, null_value or False),
'null_value': null_value or False
})
return {'value': result}
def generate_recipients_batch(self, cr, uid, results, template_id, res_ids, context=None):
"""Generates the recipients of the template. Default values can ben generated
instead of the template values if requested by template or context.
Emails (email_to, email_cc) can be transformed into partners if requested
in the context. """
if context is None:
context = {}
template = self.browse(cr, uid, template_id, context=context)
if template.use_default_to or context.get('tpl_force_default_to'):
ctx = dict(context, thread_model=template.model)
default_recipients = self.pool['mail.thread'].message_get_default_recipients(cr, uid, res_ids, context=ctx)
for res_id, recipients in default_recipients.iteritems():
results[res_id].pop('partner_to', None)
results[res_id].update(recipients)
for res_id, values in results.iteritems():
partner_ids = values.get('partner_ids', list())
if context and context.get('tpl_partners_only'):
mails = tools.email_split(values.pop('email_to', '')) + tools.email_split(values.pop('email_cc', ''))
for mail in mails:
partner_id = self.pool.get('res.partner').find_or_create(cr, uid, mail, context=context)
partner_ids.append(partner_id)
partner_to = values.pop('partner_to', '')
if partner_to:
# placeholders could generate '', 3, 2 due to some empty field values
tpl_partner_ids = [int(pid) for pid in partner_to.split(',') if pid]
partner_ids += self.pool['res.partner'].exists(cr, SUPERUSER_ID, tpl_partner_ids, context=context)
results[res_id]['partner_ids'] = partner_ids
return results
def generate_email_batch(self, cr, uid, template_id, res_ids, context=None, fields=None):
"""Generates an email from the template for given the given model based on
records given by res_ids.
:param template_id: id of the template to render.
        :param res_ids: ids of the records to use for rendering the template (model
                        is taken from template definition)
:returns: a dict containing all relevant fields for creating a new
mail.mail entry, with one extra key ``attachments``, in the
format [(report_name, data)] where data is base64 encoded.
"""
if context is None:
context = {}
if fields is None:
fields = ['subject', 'body_html', 'email_from', 'email_to', 'partner_to', 'email_cc', 'reply_to']
report_xml_pool = self.pool.get('ir.actions.report.xml')
res_ids_to_templates = self.get_email_template_batch(cr, uid, template_id, res_ids, context)
# templates: res_id -> template; template -> res_ids
templates_to_res_ids = {}
for res_id, template in res_ids_to_templates.iteritems():
templates_to_res_ids.setdefault(template, []).append(res_id)
results = dict()
for template, template_res_ids in templates_to_res_ids.iteritems():
# generate fields value for all res_ids linked to the current template
ctx = context.copy()
if template.lang:
ctx['lang'] = template._context.get('lang')
for field in fields:
generated_field_values = self.render_template_batch(
cr, uid, getattr(template, field), template.model, template_res_ids,
post_process=(field == 'body_html'),
context=ctx)
for res_id, field_value in generated_field_values.iteritems():
results.setdefault(res_id, dict())[field] = field_value
# compute recipients
results = self.generate_recipients_batch(cr, uid, results, template.id, template_res_ids, context=context)
# update values for all res_ids
for res_id in template_res_ids:
values = results[res_id]
# body: add user signature, sanitize
if 'body_html' in fields and template.user_signature:
signature = self.pool.get('res.users').browse(cr, uid, uid, context).signature
if signature:
values['body_html'] = tools.append_content_to_html(values['body_html'], signature, plaintext=False)
if values.get('body_html'):
values['body'] = tools.html_sanitize(values['body_html'])
# technical settings
values.update(
mail_server_id=template.mail_server_id.id or False,
auto_delete=template.auto_delete,
model=template.model,
res_id=res_id or False,
attachment_ids=[attach.id for attach in template.attachment_ids],
)
            # Add report to attachments: generated for each res_id in template_res_ids
if template.report_template:
for res_id in template_res_ids:
attachments = []
report_name = self.render_template(cr, uid, template.report_name, template.model, res_id, context=ctx)
report = report_xml_pool.browse(cr, uid, template.report_template.id, context)
report_service = report.report_name
if report.report_type in ['qweb-html', 'qweb-pdf']:
result, format = self.pool['report'].get_pdf(cr, uid, [res_id], report_service, context=ctx), 'pdf'
else:
result, format = openerp.report.render_report(cr, uid, [res_id], report_service, {'model': template.model}, ctx)
# TODO in trunk, change return format to binary to match message_post expected format
result = base64.b64encode(result)
if not report_name:
report_name = 'report.' + report_service
ext = "." + format
if not report_name.endswith(ext):
report_name += ext
attachments.append((report_name, result))
results[res_id]['attachments'] = attachments
return results
@api.cr_uid_id_context
def send_mail(self, cr, uid, template_id, res_id, force_send=False, raise_exception=False, context=None):
"""Generates a new mail message for the given template and record,
and schedules it for delivery through the ``mail`` module's scheduler.
:param int template_id: id of the template to render
:param int res_id: id of the record to render the template with
(model is taken from the template)
:param bool force_send: if True, the generated mail.message is
immediately sent after being created, as if the scheduler
was executed for this message only.
:returns: id of the mail.message that was created
"""
if context is None:
context = {}
mail_mail = self.pool.get('mail.mail')
ir_attachment = self.pool.get('ir.attachment')
# create a mail_mail based on values, without attachments
values = self.generate_email(cr, uid, template_id, res_id, context=context)
if not values.get('email_from'):
raise osv.except_osv(_('Warning!'), _("Sender email is missing or empty after template rendering. Specify one to deliver your message"))
values['recipient_ids'] = [(4, pid) for pid in values.get('partner_ids', list())]
attachment_ids = values.pop('attachment_ids', [])
attachments = values.pop('attachments', [])
msg_id = mail_mail.create(cr, uid, values, context=context)
mail = mail_mail.browse(cr, uid, msg_id, context=context)
# manage attachments
for attachment in attachments:
attachment_data = {
'name': attachment[0],
'datas_fname': attachment[0],
'datas': attachment[1],
'res_model': 'mail.message',
'res_id': mail.mail_message_id.id,
}
context = dict(context)
context.pop('default_type', None)
attachment_ids.append(ir_attachment.create(cr, uid, attachment_data, context=context))
if attachment_ids:
values['attachment_ids'] = [(6, 0, attachment_ids)]
mail_mail.write(cr, uid, msg_id, {'attachment_ids': [(6, 0, attachment_ids)]}, context=context)
if force_send:
mail_mail.send(cr, uid, [msg_id], raise_exception=raise_exception, context=context)
return msg_id
# Compatibility method
def render_template(self, cr, uid, template, model, res_id, context=None):
return self.render_template_batch(cr, uid, template, model, [res_id], context)[res_id]
def get_email_template(self, cr, uid, template_id=False, record_id=None, context=None):
return self.get_email_template_batch(cr, uid, template_id, [record_id], context)[record_id]
def generate_email(self, cr, uid, template_id, res_id, context=None):
return self.generate_email_batch(cr, uid, template_id, [res_id], context)[res_id]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 186,850,029,866,402,720 | 50.341837 | 151 | 0.572725 | false |
kaushik94/boto | boto/mws/__init__.py | 429 | 1101 | # Copyright (c) 2008, Chris Moyer http://coredumped.org
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
| mit | -6,297,680,319,543,344,000 | 51.428571 | 74 | 0.770209 | false |
GoogleCloudPlatform/Data-Pipeline | app/src/pipelines/shardstage_test.py | 1 | 3800 | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ShardStage unit tests."""
import mock
import logging
from src import basetest
from src.pipelines import shardstage
class SimpleShardStage(shardstage.ShardStage):
"""A Simple stage that just keeps track of what config it was started with."""
def __init__(self, config):
super(shardstage.ShardStage, self).__init__(config)
self.simple_shard_stage_config = config
class ShardStageTest(basetest.TestCase):
def setUp(self):
    super(ShardStageTest, self).setUp()
self.gcscompositor_mock = mock.patch(
'src.'
'pipelines.stages.gcscompositor.GcsCompositor').start()
def testNoNeedToShard(self):
configs = [
{},
{'start': 100},
{'length': 100, 'shardSize': 100},
{'length': 100, 'shardSize': 400},
]
for config in configs:
stage = SimpleShardStage(config)
self.assertEquals(([], []), stage.ShardStage(config))
def testNeedsToShard(self):
configs_and_results = [
({'length': 100, 'shardSize': 50, 'sinks': ['gs://bucket/name'],
'shardPrefix': 'shard', 'contentType': 'text/csv'},
{'shards': [
{'start': 0, 'length': 50, 'shardSize': 50,
'shardPrefix': 'shard', 'contentType': 'text/csv',
'sinks': ['gs://bucket/name/shard0']},
{'start': 50, 'length': 50, 'shardSize': 50,
'shardPrefix': 'shard', 'contentType': 'text/csv',
'sinks': ['gs://bucket/name/shard1']},
],
'compositors': [{
'sources': ['gs://bucket/name/shard0', 'gs://bucket/name/shard1'],
'contentType': 'text/csv',
'deleteSources': True,
'sinks': ['gs://bucket/name'],
}]}),
({'length': 100, 'shardSize': 99, 'sinks': ['gs://bucket/name'],
'contentType': 'text/csv'},
{'shards': [
{'start': 0, 'length': 50, 'shardSize': 50,
'contentType': 'text/csv',
'sinks': ['gs://bucket/name/0']},
{'start': 50, 'length': 50, 'shardSize': 50,
'contentType': 'text/csv',
'sinks': ['gs://bucket/name/1']},
],
'compositors': [{
'sources': ['gs://bucket/name/0', 'gs://bucket/name/1'],
'contentType': 'text/csv',
'deleteSources': True,
'sinks': ['gs://bucket/name'],
}]})
]
for config, result in configs_and_results:
with mock.patch('uuid.uuid4', side_effect=[str(x) for x in range(10)]):
stage = SimpleShardStage(config)
(shards, compositors) = stage.ShardStage(config)
self.assertEquals(len(result['shards']), len(shards))
self.assertEquals(len(result['compositors']), len(compositors))
for expected, actual in zip(result['shards'], shards):
self.assertSameStructure(expected, actual.simple_shard_stage_config)
for expected, actual in zip(result['compositors'], compositors):
gcscompositor_config = self.gcscompositor_mock.call_args[0][0]
self.assertSameStructure(expected, gcscompositor_config)
if __name__ == '__main__':
basetest.main()
| apache-2.0 | -4,107,881,364,371,993,600 | 35.893204 | 80 | 0.587632 | false |
dimm0/tacc_stats | tacc_stats/pickler/tests/pickler_test.py | 1 | 4459 | from __future__ import print_function
import os, sys
from nose import with_setup
import cPickle as pickle
from tacc_stats.pickler import job_pickles
from tacc_stats.pickler import job_stats, batch_acct
sys.modules['pickler.job_stats'] = job_stats
sys.modules['pickler.batch_acct'] = batch_acct
path = os.path.dirname(os.path.abspath(__file__))
data_dir = os.path.join(path, 'data')
def setup_func():
a = open(os.path.join(path,"cfg.py"),"w")
a.write('archive_dir = \"' + data_dir + '\"\n'
'acct_path = \"'+ data_dir +'/tacc_jobs_completed\"\n'
'host_list_dir= \"' + data_dir + '\"\n'
'pickles_dir= \"' + path + '\"\n'
'host_name_ext= \"platform.extension\"\n'
'batch_system = \"SLURM\"\n'
'seek=0\n')
a.close()
def teardown_func():
os.remove(os.path.join(path,"cfg.py"))
try:
os.remove(os.path.join(path,"cfg.pyc"))
except: pass
os.remove(os.path.join(path,'2013-10-01',"1835740"))
os.rmdir(os.path.join(path,'2013-10-01'))
os.remove(os.path.join(path,'2014-10-31',"20"))
os.rmdir(os.path.join(path,'2014-10-31'))
@with_setup(setup_func, teardown_func)
def test():
from tacc_stats.pickler.tests import cfg
pickle_options = { 'processes' : 1,
'start' : '2013-10-01',
'end' : '2014-11-01',
'pickle_dir' : cfg.pickles_dir,
'batch_system' : cfg.batch_system,
'acct_path' : cfg.acct_path,
'archive_dir' : cfg.archive_dir,
'host_list_dir' : cfg.host_list_dir,
'host_name_ext' : cfg.host_name_ext,
'seek' : cfg.seek
}
pickler = job_pickles.JobPickles(**pickle_options)
pickler.run()
assert os.path.isfile(os.path.join(path,'2013-10-01','1835740')) == True
print("Pickle file generated.")
old = pickle.load(open(os.path.join(path,'1835740_ref')))
new = pickle.load(open(os.path.join(path,'2013-10-01','1835740')))
compare_newtoold(new,old)
old = pickle.load(open(os.path.join(path,'20_ref')))
new = pickle.load(open(os.path.join(path,'2014-10-31','20')))
compare_newtoold(new,old)
@with_setup(setup_func, teardown_func)
def test_ids():
from tacc_stats.pickler.tests import cfg
pickle_options = { 'processes' : 1,
'pickle_dir' : cfg.pickles_dir,
'batch_system' : cfg.batch_system,
'acct_path' : cfg.acct_path,
'archive_dir' : cfg.archive_dir,
'host_list_dir' : cfg.host_list_dir,
'host_name_ext' : cfg.host_name_ext,
'seek' : cfg.seek
}
pickler = job_pickles.JobPickles(**pickle_options)
pickler.run(['1835740'])
pickler.run(['20'])
assert os.path.isfile(os.path.join(path,'2013-10-01','1835740')) == True
print("Pickle file generated.")
old = pickle.load(open(os.path.join(path,'1835740_ref')))
new = pickle.load(open(os.path.join(path,'2013-10-01','1835740')))
compare_newtoold(new,old)
old = pickle.load(open(os.path.join(path,'20_ref')))
new = pickle.load(open(os.path.join(path,'2014-10-31','20')))
compare_newtoold(new,old)
def compare_newtoold(new,old):
assert new.id == old.id
for i in range(len(old.times)):
assert new.times[i] == old.times[i]
for i in range(len(old.hosts.keys())):
assert old.hosts.keys()[i] == new.hosts.keys()[i]
print('id, keys, and times match.')
for host_name, host in old.hosts.iteritems():
for type_name, type_stats in host.stats.iteritems():
if type_name =='ib': continue
for dev_name, dev_stats in type_stats.iteritems():
for i in range(len(dev_stats)):
for j in range(len(dev_stats[i])):
if new.hosts[host_name].stats[type_name][dev_name][i][j]-dev_stats[i][j] != 0.0:
print(new.times[i],host_name,type_name,dev_name,new.hosts[host_name].stats[type_name][dev_name][i][j],dev_stats[i][j])
assert new.hosts[host_name].stats[type_name][dev_name][i][j] == dev_stats[i][j]
print('stats match.')
| lgpl-2.1 | 6,955,468,638,837,430,000 | 39.536364 | 146 | 0.545638 | false |
Slezhuk/ansible | lib/ansible/modules/network/nmcli.py | 21 | 41915 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Chris Long <[email protected]> <[email protected]>
#
# This file is a module for Ansible that interacts with Network Manager
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION='''
---
module: nmcli
author: "Chris Long (@alcamie101)"
short_description: Manage Networking
requirements: [ nmcli, dbus ]
version_added: "2.0"
description:
    - Manage network devices. Create, modify, and manage ethernet, team, bond and vlan connections.
options:
state:
required: True
choices: [ present, absent ]
description:
- Whether the device should exist or not, taking action if the state is different from what is stated.
autoconnect:
required: False
default: "yes"
choices: [ "yes", "no" ]
description:
- Whether the connection should start on boot.
- Whether the connection profile can be automatically activated
conn_name:
required: True
description:
      - 'The name used to call the connection. When not provided, a default name is generated: <type>[-<ifname>][-<num>]'
ifname:
required: False
default: conn_name
description:
      - The interface to bind the connection to. The connection will only be applicable to this interface name.
- A special value of "*" can be used for interface-independent connections.
- The ifname argument is mandatory for all connection types except bond, team, bridge and vlan.
type:
required: False
choices: [ ethernet, team, team-slave, bond, bond-slave, bridge, vlan ]
description:
- This is the type of device or network connection that you wish to create.
mode:
required: False
choices: [ "balance-rr", "active-backup", "balance-xor", "broadcast", "802.3ad", "balance-tlb", "balance-alb" ]
    default: balance-rr
description:
      - This is the mode that the bond, team or bridge connection should operate in.
master:
required: False
default: None
description:
      - Master (ifname, connection UUID or conn_name) of the bridge, team or bond master connection profile.
ip4:
required: False
default: None
description:
- 'The IPv4 address to this interface using this format ie: "192.0.2.24/24"'
gw4:
required: False
description:
- 'The IPv4 gateway for this interface using this format ie: "192.0.2.1"'
dns4:
required: False
default: None
description:
      - 'A list of up to 3 DNS servers, in IPv4 format, e.g. to add two IPv4 DNS server addresses: ["192.0.2.53", "198.51.100.53"]'
ip6:
required: False
default: None
description:
- 'The IPv6 address to this interface using this format ie: "abbe::cafe"'
gw6:
required: False
default: None
description:
- 'The IPv6 gateway for this interface using this format ie: "2001:db8::1"'
dns6:
required: False
description:
      - 'A list of up to 3 DNS servers, in IPv6 format, e.g. to add two IPv6 DNS server addresses: ["2001:4860:4860::8888", "2001:4860:4860::8844"]'
mtu:
required: False
default: 1500
description:
- The connection MTU, e.g. 9000. This can't be applied when creating the interface and is done once the interface has been created.
- Can be used when modifying Team, VLAN, Ethernet (Future plans to implement wifi, pppoe, infiniband)
primary:
required: False
default: None
description:
      - This is only used with bond and is the primary interface name (for "active-backup" mode); this is usually the 'ifname'.
miimon:
required: False
default: 100
description:
- This is only used with bond - miimon
downdelay:
required: False
default: None
description:
- This is only used with bond - downdelay
updelay:
required: False
default: None
description:
- This is only used with bond - updelay
arp_interval:
required: False
default: None
description:
- This is only used with bond - ARP interval
arp_ip_target:
required: False
default: None
description:
- This is only used with bond - ARP IP target
stp:
required: False
default: None
description:
- This is only used with bridge and controls whether Spanning Tree Protocol (STP) is enabled for this bridge
priority:
required: False
default: 128
description:
- This is only used with 'bridge' - sets STP priority
forwarddelay:
required: False
default: 15
description:
- This is only used with bridge - [forward-delay <2-30>] STP forwarding delay, in seconds
hellotime:
required: False
default: 2
description:
- This is only used with bridge - [hello-time <1-10>] STP hello time, in seconds
maxage:
required: False
default: 20
description:
- This is only used with bridge - [max-age <6-42>] STP maximum message age, in seconds
ageingtime:
required: False
default: 300
description:
- This is only used with bridge - [ageing-time <0-1000000>] the Ethernet MAC address aging time, in seconds
mac:
required: False
default: None
description:
- >
This is only used with bridge - MAC address of the bridge
(note: this requires a recent kernel feature, originally introduced in 3.15 upstream kernel)
slavepriority:
required: False
default: 32
description:
- This is only used with 'bridge-slave' - [<0-63>] - STP priority of this slave
path_cost:
required: False
default: 100
description:
- This is only used with 'bridge-slave' - [<1-65535>] - STP port cost for destinations via this slave
hairpin:
required: False
default: yes
description:
- This is only used with 'bridge-slave' - 'hairpin mode' for the slave, which allows frames to be sent back out through the slave the
frame was received on.
vlanid:
required: False
default: None
description:
- This is only used with VLAN - VLAN ID in range <0-4095>
vlandev:
required: False
default: None
description:
- This is only used with VLAN - parent device this VLAN is on, can use ifname
flags:
required: False
default: None
description:
- This is only used with VLAN - flags
ingress:
required: False
default: None
description:
- This is only used with VLAN - VLAN ingress priority mapping
egress:
required: False
default: None
description:
- This is only used with VLAN - VLAN egress priority mapping
'''
EXAMPLES='''
# These examples are using the following inventory:
#
# ## Directory layout:
#
# |_/inventory/cloud-hosts
# | /group_vars/openstack-stage.yml
# | /host_vars/controller-01.openstack.host.com
# | /host_vars/controller-02.openstack.host.com
# |_/playbook/library/nmcli.py
# | /playbook-add.yml
# | /playbook-del.yml
# ```
#
# ## inventory examples
# ### groups_vars
# ```yml
# ---
# #devops_os_define_network
# storage_gw: "192.0.2.254"
# external_gw: "198.51.100.254"
# tenant_gw: "203.0.113.254"
#
# #Team vars
# nmcli_team:
# - conn_name: tenant
# ip4: '{{ tenant_ip }}'
# gw4: '{{ tenant_gw }}'
# - conn_name: external
# ip4: '{{ external_ip }}'
# gw4: '{{ external_gw }}'
# - conn_name: storage
# ip4: '{{ storage_ip }}'
# gw4: '{{ storage_gw }}'
# nmcli_team_slave:
# - conn_name: em1
# ifname: em1
# master: tenant
# - conn_name: em2
# ifname: em2
# master: tenant
# - conn_name: p2p1
# ifname: p2p1
# master: storage
# - conn_name: p2p2
# ifname: p2p2
# master: external
#
# #bond vars
# nmcli_bond:
# - conn_name: tenant
# ip4: '{{ tenant_ip }}'
# gw4: ''
# mode: balance-rr
# - conn_name: external
# ip4: '{{ external_ip }}'
# gw4: ''
# mode: balance-rr
# - conn_name: storage
# ip4: '{{ storage_ip }}'
# gw4: '{{ storage_gw }}'
# mode: balance-rr
# nmcli_bond_slave:
# - conn_name: em1
# ifname: em1
# master: tenant
# - conn_name: em2
# ifname: em2
# master: tenant
# - conn_name: p2p1
# ifname: p2p1
# master: storage
# - conn_name: p2p2
# ifname: p2p2
# master: external
#
# #ethernet vars
# nmcli_ethernet:
# - conn_name: em1
# ifname: em1
# ip4: '{{ tenant_ip }}'
# gw4: '{{ tenant_gw }}'
# - conn_name: em2
# ifname: em2
# ip4: '{{ tenant_ip1 }}'
# gw4: '{{ tenant_gw }}'
# - conn_name: p2p1
# ifname: p2p1
# ip4: '{{ storage_ip }}'
# gw4: '{{ storage_gw }}'
# - conn_name: p2p2
# ifname: p2p2
# ip4: '{{ external_ip }}'
# gw4: '{{ external_gw }}'
# ```
#
# ### host_vars
# ```yml
# ---
# storage_ip: "192.0.2.91/23"
# external_ip: "198.51.100.23/21"
# tenant_ip: "203.0.113.77/23"
# ```
## playbook-add.yml example
---
- hosts: openstack-stage
remote_user: root
tasks:
- name: install needed network manager libs
yum:
name: '{{ item }}'
state: installed
with_items:
- NetworkManager-glib
- libnm-qt-devel.x86_64
- nm-connection-editor.x86_64
- libsemanage-python
- policycoreutils-python
##### Working with all cloud nodes - Teaming
- name: try nmcli add team - conn_name only & ip4 gw4
nmcli:
type: team
conn_name: '{{ item.conn_name }}'
ip4: '{{ item.ip4 }}'
gw4: '{{ item.gw4 }}'
state: present
with_items:
- '{{ nmcli_team }}'
- name: try nmcli add teams-slave
nmcli:
type: team-slave
conn_name: '{{ item.conn_name }}'
ifname: '{{ item.ifname }}'
master: '{{ item.master }}'
state: present
with_items:
- '{{ nmcli_team_slave }}'
###### Working with all cloud nodes - Bonding
- name: try nmcli add bond - conn_name only & ip4 gw4 mode
nmcli:
type: bond
conn_name: '{{ item.conn_name }}'
ip4: '{{ item.ip4 }}'
gw4: '{{ item.gw4 }}'
mode: '{{ item.mode }}'
state: present
with_items:
- '{{ nmcli_bond }}'
- name: try nmcli add bond-slave
nmcli:
type: bond-slave
conn_name: '{{ item.conn_name }}'
ifname: '{{ item.ifname }}'
master: '{{ item.master }}'
state: present
with_items:
- '{{ nmcli_bond_slave }}'
##### Working with all cloud nodes - Ethernet
- name: nmcli add Ethernet - conn_name only & ip4 gw4
nmcli:
type: ethernet
conn_name: '{{ item.conn_name }}'
ip4: '{{ item.ip4 }}'
gw4: '{{ item.gw4 }}'
state: present
with_items:
- '{{ nmcli_ethernet }}'
## playbook-del.yml example
- hosts: openstack-stage
remote_user: root
tasks:
- name: try nmcli del team - multiple
nmcli:
conn_name: '{{ item.conn_name }}'
state: absent
with_items:
- conn_name: em1
- conn_name: em2
- conn_name: p1p1
- conn_name: p1p2
- conn_name: p2p1
- conn_name: p2p2
- conn_name: tenant
- conn_name: storage
- conn_name: external
- conn_name: team-em1
- conn_name: team-em2
- conn_name: team-p1p1
- conn_name: team-p1p2
- conn_name: team-p2p1
- conn_name: team-p2p2
# To add an Ethernet connection with static IP configuration, issue a command as follows
- nmcli:
conn_name: my-eth1
ifname: eth1
type: ethernet
ip4: 192.0.2.100/24
gw4: 192.0.2.1
state: present
# To add an Team connection with static IP configuration, issue a command as follows
- nmcli:
conn_name: my-team1
ifname: my-team1
type: team
ip4: 192.0.2.100/24
gw4: 192.0.2.1
state: present
autoconnect: yes
# Optionally, at the same time specify IPv6 addresses for the device as follows:
- nmcli:
conn_name: my-eth1
ifname: eth1
type: ethernet
ip4: 192.0.2.100/24
gw4: 192.0.2.1
ip6: '2001:db8::cafe'
gw6: '2001:db8::1'
state: present
# To add two IPv4 DNS server addresses:
- nmcli:
conn_name: my-eth1
dns4:
- 192.0.2.53
- 198.51.100.53
state: present
# To make a profile usable for all compatible Ethernet interfaces, issue a command as follows
- nmcli:
    type: ethernet
    conn_name: my-eth1
ifname: '*'
state: present
# To change the property of a setting e.g. MTU, issue a command as follows:
- nmcli:
conn_name: my-eth1
mtu: 9000
type: ethernet
state: present
# Exit statuses:
# - nmcli exits with status 0 if it succeeds; a value greater than 0 is
#   returned if an error occurs.
# - 0 Success - indicates the operation succeeded
# - 1 Unknown or unspecified error
# - 2 Invalid user input, wrong nmcli invocation
# - 3 Timeout expired (see --wait option)
# - 4 Connection activation failed
# - 5 Connection deactivation failed
# - 6 Disconnecting device failed
# - 7 Connection deletion failed
# - 8 NetworkManager is not running
# - 9 nmcli and NetworkManager versions mismatch
# - 10 Connection, device, or access point does not exist.
'''
# import ansible.module_utils.basic
import os
import sys
HAVE_DBUS=False
try:
import dbus
HAVE_DBUS=True
except ImportError:
pass
HAVE_NM_CLIENT=False
try:
from gi.repository import NetworkManager, NMClient
HAVE_NM_CLIENT=True
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
class Nmcli(object):
"""
This is the generic nmcli manipulation class that is subclassed based on platform.
A subclass may wish to override the following action methods:-
- create_connection()
- delete_connection()
- modify_connection()
- show_connection()
- up_connection()
- down_connection()
All subclasses MUST define platform and distribution (which may be None).
"""
platform='Generic'
distribution=None
if HAVE_DBUS:
bus=dbus.SystemBus()
# The following is going to be used in dbus code
DEVTYPES={1: "Ethernet",
2: "Wi-Fi",
5: "Bluetooth",
6: "OLPC",
7: "WiMAX",
8: "Modem",
9: "InfiniBand",
10: "Bond",
11: "VLAN",
12: "ADSL",
13: "Bridge",
14: "Generic",
15: "Team"
}
STATES={0: "Unknown",
10: "Unmanaged",
20: "Unavailable",
30: "Disconnected",
40: "Prepare",
50: "Config",
60: "Need Auth",
70: "IP Config",
80: "IP Check",
90: "Secondaries",
100: "Activated",
110: "Deactivating",
120: "Failed"
}
def __init__(self, module):
self.module=module
self.state=module.params['state']
self.autoconnect=module.params['autoconnect']
self.conn_name=module.params['conn_name']
self.master=module.params['master']
self.ifname=module.params['ifname']
self.type=module.params['type']
self.ip4=module.params['ip4']
self.gw4=module.params['gw4']
self.dns4=module.params['dns4']
self.ip6=module.params['ip6']
self.gw6=module.params['gw6']
self.dns6=module.params['dns6']
self.mtu=module.params['mtu']
self.stp=module.params['stp']
self.priority=module.params['priority']
self.mode=module.params['mode']
self.miimon=module.params['miimon']
self.downdelay=module.params['downdelay']
self.updelay=module.params['updelay']
self.arp_interval=module.params['arp_interval']
self.arp_ip_target=module.params['arp_ip_target']
self.slavepriority=module.params['slavepriority']
self.forwarddelay=module.params['forwarddelay']
self.hellotime=module.params['hellotime']
self.maxage=module.params['maxage']
self.ageingtime=module.params['ageingtime']
self.mac=module.params['mac']
self.vlanid=module.params['vlanid']
self.vlandev=module.params['vlandev']
self.flags=module.params['flags']
self.ingress=module.params['ingress']
self.egress=module.params['egress']
def execute_command(self, cmd, use_unsafe_shell=False, data=None):
return self.module.run_command(cmd, use_unsafe_shell=use_unsafe_shell, data=data)
def merge_secrets(self, proxy, config, setting_name):
try:
# returns a dict of dicts mapping name::setting, where setting is a dict
# mapping key::value. Each member of the 'setting' dict is a secret
secrets=proxy.GetSecrets(setting_name)
# Copy the secrets into our connection config
for setting in secrets:
for key in secrets[setting]:
config[setting_name][key]=secrets[setting][key]
except Exception as e:
pass
def dict_to_string(self, d):
# Try to trivially translate a dictionary's elements into nice string
# formatting.
dstr=""
for key in d:
val=d[key]
str_val=""
add_string=True
if isinstance(val, dbus.Array):
for elt in val:
if isinstance(elt, dbus.Byte):
str_val+="%s " % int(elt)
elif isinstance(elt, dbus.String):
str_val+="%s" % elt
elif isinstance(val, dbus.Dictionary):
dstr+=self.dict_to_string(val)
add_string=False
else:
str_val=val
if add_string:
dstr+="%s: %s\n" % ( key, str_val)
return dstr
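# Illustrative sketch (not part of the original module): a plain string value
# falls through to the generic branch, so
# self.dict_to_string({'id': 'my-eth1'}) returns "id: my-eth1\n"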
def connection_to_string(self, config):
# dump a connection configuration to use in list_connection_info
setting_list=[]
for setting_name in config:
setting_list.append(self.dict_to_string(config[setting_name]))
return setting_list
# print ""
def bool_to_string(self, boolean):
if boolean:
return "yes"
else:
return "no"
def list_connection_info(self):
# Ask the settings service for the list of connections it provides
bus=dbus.SystemBus()
service_name="org.freedesktop.NetworkManager"
proxy=bus.get_object(service_name, "/org/freedesktop/NetworkManager/Settings")
settings=dbus.Interface(proxy, "org.freedesktop.NetworkManager.Settings")
connection_paths=settings.ListConnections()
connection_list=[]
# List each connection's name, UUID, and type
for path in connection_paths:
con_proxy=bus.get_object(service_name, path)
settings_connection=dbus.Interface(con_proxy, "org.freedesktop.NetworkManager.Settings.Connection")
config=settings_connection.GetSettings()
# Now get secrets too; we grab the secrets for each type of connection
# (since there isn't a "get all secrets" call because most of the time
# you only need 'wifi' secrets or '802.1x' secrets, not everything) and
# merge that into the configuration data - To use at a later stage
self.merge_secrets(settings_connection, config, '802-11-wireless')
self.merge_secrets(settings_connection, config, '802-11-wireless-security')
self.merge_secrets(settings_connection, config, '802-1x')
self.merge_secrets(settings_connection, config, 'gsm')
self.merge_secrets(settings_connection, config, 'cdma')
self.merge_secrets(settings_connection, config, 'ppp')
# Get the details of the 'connection' setting
s_con=config['connection']
connection_list.append(s_con['id'])
connection_list.append(s_con['uuid'])
connection_list.append(s_con['type'])
connection_list.append(self.connection_to_string(config))
return connection_list
def connection_exists(self):
# search the flattened connection info for the connection name to decide whether it already exists
connections=self.list_connection_info()
for con_item in connections:
if self.conn_name==con_item:
return True
return False
def down_connection(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# if self.connection_exists():
cmd.append('con')
cmd.append('down')
cmd.append(self.conn_name)
return self.execute_command(cmd)
def up_connection(self):
cmd=[self.module.get_bin_path('nmcli', True)]
cmd.append('con')
cmd.append('up')
cmd.append(self.conn_name)
return self.execute_command(cmd)
def create_connection_team(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for creating team interface
cmd.append('con')
cmd.append('add')
cmd.append('type')
cmd.append('team')
cmd.append('con-name')
if self.conn_name is not None:
cmd.append(self.conn_name)
elif self.ifname is not None:
cmd.append(self.ifname)
cmd.append('ifname')
if self.ifname is not None:
cmd.append(self.ifname)
elif self.conn_name is not None:
cmd.append(self.conn_name)
if self.ip4 is not None:
cmd.append('ip4')
cmd.append(self.ip4)
if self.gw4 is not None:
cmd.append('gw4')
cmd.append(self.gw4)
if self.ip6 is not None:
cmd.append('ip6')
cmd.append(self.ip6)
if self.gw6 is not None:
cmd.append('gw6')
cmd.append(self.gw6)
if self.autoconnect is not None:
cmd.append('autoconnect')
cmd.append(self.bool_to_string(self.autoconnect))
return cmd
def modify_connection_team(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for modifying team interface
cmd.append('con')
cmd.append('mod')
cmd.append(self.conn_name)
if self.ip4 is not None:
cmd.append('ipv4.address')
cmd.append(self.ip4)
if self.gw4 is not None:
cmd.append('ipv4.gateway')
cmd.append(self.gw4)
if self.dns4 is not None:
cmd.append('ipv4.dns')
cmd.append(self.dns4)
if self.ip6 is not None:
cmd.append('ipv6.address')
cmd.append(self.ip6)
if self.gw6 is not None:
cmd.append('ipv6.gateway')
cmd.append(self.gw6)
if self.dns6 is not None:
cmd.append('ipv6.dns')
cmd.append(self.dns6)
if self.autoconnect is not None:
cmd.append('autoconnect')
cmd.append(self.bool_to_string(self.autoconnect))
# Can't use MTU with team
return cmd
def create_connection_team_slave(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for creating team-slave interface
cmd.append('connection')
cmd.append('add')
cmd.append('type')
cmd.append(self.type)
cmd.append('con-name')
if self.conn_name is not None:
cmd.append(self.conn_name)
elif self.ifname is not None:
cmd.append(self.ifname)
cmd.append('ifname')
if self.ifname is not None:
cmd.append(self.ifname)
elif self.conn_name is not None:
cmd.append(self.conn_name)
if self.master is not None:
cmd.append('master')
cmd.append(self.master)
# if self.mtu is not None:
# cmd.append('802-3-ethernet.mtu')
# cmd.append(self.mtu)
return cmd
def modify_connection_team_slave(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for modifying team-slave interface
cmd.append('con')
cmd.append('mod')
cmd.append(self.conn_name)
cmd.append('connection.master')
cmd.append(self.master)
if self.mtu is not None:
cmd.append('802-3-ethernet.mtu')
cmd.append(self.mtu)
return cmd
def create_connection_bond(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for creating bond interface
cmd.append('con')
cmd.append('add')
cmd.append('type')
cmd.append('bond')
cmd.append('con-name')
if self.conn_name is not None:
cmd.append(self.conn_name)
elif self.ifname is not None:
cmd.append(self.ifname)
cmd.append('ifname')
if self.ifname is not None:
cmd.append(self.ifname)
elif self.conn_name is not None:
cmd.append(self.conn_name)
if self.ip4 is not None:
cmd.append('ip4')
cmd.append(self.ip4)
if self.gw4 is not None:
cmd.append('gw4')
cmd.append(self.gw4)
if self.ip6 is not None:
cmd.append('ip6')
cmd.append(self.ip6)
if self.gw6 is not None:
cmd.append('gw6')
cmd.append(self.gw6)
if self.autoconnect is not None:
cmd.append('autoconnect')
cmd.append(self.bool_to_string(self.autoconnect))
if self.mode is not None:
cmd.append('mode')
cmd.append(self.mode)
if self.miimon is not None:
cmd.append('miimon')
cmd.append(self.miimon)
if self.downdelay is not None:
cmd.append('downdelay')
cmd.append(self.downdelay)
if self.updelay is not None:
cmd.append('updelay')
cmd.append(self.updelay)
if self.arp_interval is not None:
cmd.append('arp-interval')
cmd.append(self.arp_interval)
if self.arp_ip_target is not None:
cmd.append('arp-ip-target')
cmd.append(self.arp_ip_target)
return cmd
def modify_connection_bond(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for modifying bond interface
cmd.append('con')
cmd.append('mod')
cmd.append(self.conn_name)
if self.ip4 is not None:
cmd.append('ipv4.address')
cmd.append(self.ip4)
if self.gw4 is not None:
cmd.append('ipv4.gateway')
cmd.append(self.gw4)
if self.dns4 is not None:
cmd.append('ipv4.dns')
cmd.append(self.dns4)
if self.ip6 is not None:
cmd.append('ipv6.address')
cmd.append(self.ip6)
if self.gw6 is not None:
cmd.append('ipv6.gateway')
cmd.append(self.gw6)
if self.dns6 is not None:
cmd.append('ipv6.dns')
cmd.append(self.dns6)
if self.autoconnect is not None:
cmd.append('autoconnect')
cmd.append(self.bool_to_string(self.autoconnect))
return cmd
def create_connection_bond_slave(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for creating bond-slave interface
cmd.append('connection')
cmd.append('add')
cmd.append('type')
cmd.append('bond-slave')
cmd.append('con-name')
if self.conn_name is not None:
cmd.append(self.conn_name)
elif self.ifname is not None:
cmd.append(self.ifname)
cmd.append('ifname')
if self.ifname is not None:
cmd.append(self.ifname)
elif self.conn_name is not None:
cmd.append(self.conn_name)
if self.master is not None:
cmd.append('master')
cmd.append(self.master)
return cmd
def modify_connection_bond_slave(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for modifying bond-slave interface
cmd.append('con')
cmd.append('mod')
cmd.append(self.conn_name)
cmd.append('connection.master')
cmd.append(self.master)
return cmd
def create_connection_ethernet(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for creating ethernet interface
# To add an Ethernet connection with static IP configuration, issue a command as follows
# - nmcli: name=add conn_name=my-eth1 ifname=eth1 type=ethernet ip4=192.0.2.100/24 gw4=192.0.2.1 state=present
# nmcli con add con-name my-eth1 ifname eth1 type ethernet ip4 192.0.2.100/24 gw4 192.0.2.1
cmd.append('con')
cmd.append('add')
cmd.append('type')
cmd.append('ethernet')
cmd.append('con-name')
if self.conn_name is not None:
cmd.append(self.conn_name)
elif self.ifname is not None:
cmd.append(self.ifname)
cmd.append('ifname')
if self.ifname is not None:
cmd.append(self.ifname)
elif self.conn_name is not None:
cmd.append(self.conn_name)
if self.ip4 is not None:
cmd.append('ip4')
cmd.append(self.ip4)
if self.gw4 is not None:
cmd.append('gw4')
cmd.append(self.gw4)
if self.ip6 is not None:
cmd.append('ip6')
cmd.append(self.ip6)
if self.gw6 is not None:
cmd.append('gw6')
cmd.append(self.gw6)
if self.autoconnect is not None:
cmd.append('autoconnect')
cmd.append(self.bool_to_string(self.autoconnect))
return cmd
def modify_connection_ethernet(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for modifying ethernet interface
# To add an Ethernet connection with static IP configuration, issue a command as follows
# - nmcli: name=add conn_name=my-eth1 ifname=eth1 type=ethernet ip4=192.0.2.100/24 gw4=192.0.2.1 state=present
# nmcli con add con-name my-eth1 ifname eth1 type ethernet ip4 192.0.2.100/24 gw4 192.0.2.1
cmd.append('con')
cmd.append('mod')
cmd.append(self.conn_name)
if self.ip4 is not None:
cmd.append('ipv4.address')
cmd.append(self.ip4)
if self.gw4 is not None:
cmd.append('ipv4.gateway')
cmd.append(self.gw4)
if self.dns4 is not None:
cmd.append('ipv4.dns')
cmd.append(self.dns4)
if self.ip6 is not None:
cmd.append('ipv6.address')
cmd.append(self.ip6)
if self.gw6 is not None:
cmd.append('ipv6.gateway')
cmd.append(self.gw6)
if self.dns6 is not None:
cmd.append('ipv6.dns')
cmd.append(self.dns6)
if self.mtu is not None:
cmd.append('802-3-ethernet.mtu')
cmd.append(self.mtu)
if self.autoconnect is not None:
cmd.append('autoconnect')
cmd.append(self.bool_to_string(self.autoconnect))
return cmd
def create_connection_bridge(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for creating bridge interface
return cmd
def modify_connection_bridge(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for modifying bridge interface
return cmd
def create_connection_vlan(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for creating ethernet interface
return cmd
def modify_connection_vlan(self):
cmd=[self.module.get_bin_path('nmcli', True)]
# format for modifying ethernet interface
return cmd
def create_connection(self):
cmd=[]
if self.type=='team':
# cmd=self.create_connection_team()
if (self.dns4 is not None) or (self.dns6 is not None):
cmd=self.create_connection_team()
self.execute_command(cmd)
cmd=self.modify_connection_team()
self.execute_command(cmd)
return self.up_connection()
else:
cmd=self.create_connection_team()
return self.execute_command(cmd)
elif self.type=='team-slave':
if self.mtu is not None:
cmd=self.create_connection_team_slave()
self.execute_command(cmd)
cmd=self.modify_connection_team_slave()
return self.execute_command(cmd)
else:
cmd=self.create_connection_team_slave()
return self.execute_command(cmd)
elif self.type=='bond':
if (self.mtu is not None) or (self.dns4 is not None) or (self.dns6 is not None):
cmd=self.create_connection_bond()
self.execute_command(cmd)
cmd=self.modify_connection_bond()
self.execute_command(cmd)
return self.up_connection()
else:
cmd=self.create_connection_bond()
return self.execute_command(cmd)
elif self.type=='bond-slave':
cmd=self.create_connection_bond_slave()
elif self.type=='ethernet':
if (self.mtu is not None) or (self.dns4 is not None) or (self.dns6 is not None):
cmd=self.create_connection_ethernet()
self.execute_command(cmd)
cmd=self.modify_connection_ethernet()
self.execute_command(cmd)
return self.up_connection()
else:
cmd=self.create_connection_ethernet()
return self.execute_command(cmd)
elif self.type=='bridge':
cmd=self.create_connection_bridge()
elif self.type=='vlan':
cmd=self.create_connection_vlan()
return self.execute_command(cmd)
def remove_connection(self):
# self.down_connection()
cmd=[self.module.get_bin_path('nmcli', True)]
cmd.append('con')
cmd.append('del')
cmd.append(self.conn_name)
return self.execute_command(cmd)
def modify_connection(self):
cmd=[]
if self.type=='team':
cmd=self.modify_connection_team()
elif self.type=='team-slave':
cmd=self.modify_connection_team_slave()
elif self.type=='bond':
cmd=self.modify_connection_bond()
elif self.type=='bond-slave':
cmd=self.modify_connection_bond_slave()
elif self.type=='ethernet':
cmd=self.modify_connection_ethernet()
elif self.type=='bridge':
cmd=self.modify_connection_bridge()
elif self.type=='vlan':
cmd=self.modify_connection_vlan()
return self.execute_command(cmd)
def main():
# Parsing argument file
module=AnsibleModule(
argument_spec=dict(
autoconnect=dict(required=False, default=None, type='bool'),
state=dict(required=True, choices=['present', 'absent'], type='str'),
conn_name=dict(required=True, type='str'),
master=dict(required=False, default=None, type='str'),
ifname=dict(required=False, default=None, type='str'),
type=dict(required=False, default=None, choices=['ethernet', 'team', 'team-slave', 'bond', 'bond-slave', 'bridge', 'vlan'], type='str'),
ip4=dict(required=False, default=None, type='str'),
gw4=dict(required=False, default=None, type='str'),
dns4=dict(required=False, default=None, type='str'),
ip6=dict(required=False, default=None, type='str'),
gw6=dict(required=False, default=None, type='str'),
dns6=dict(required=False, default=None, type='str'),
# Bond Specific vars
mode=dict(require=False, default="balance-rr", type='str', choices=["balance-rr", "active-backup", "balance-xor", "broadcast", "802.3ad",
"balance-tlb", "balance-alb"]),
miimon=dict(required=False, default=None, type='str'),
downdelay=dict(required=False, default=None, type='str'),
updelay=dict(required=False, default=None, type='str'),
arp_interval=dict(required=False, default=None, type='str'),
arp_ip_target=dict(required=False, default=None, type='str'),
# general usage
mtu=dict(required=False, default=None, type='str'),
mac=dict(required=False, default=None, type='str'),
# bridge specific vars
stp=dict(required=False, default=True, type='bool'),
priority=dict(required=False, default="128", type='str'),
slavepriority=dict(required=False, default="32", type='str'),
forwarddelay=dict(required=False, default="15", type='str'),
hellotime=dict(required=False, default="2", type='str'),
maxage=dict(required=False, default="20", type='str'),
ageingtime=dict(required=False, default="300", type='str'),
# vlan specific vars
vlanid=dict(required=False, default=None, type='str'),
vlandev=dict(required=False, default=None, type='str'),
flags=dict(required=False, default=None, type='str'),
ingress=dict(required=False, default=None, type='str'),
egress=dict(required=False, default=None, type='str'),
),
supports_check_mode=True
)
if not HAVE_DBUS:
module.fail_json(msg="This module requires dbus python bindings")
if not HAVE_NM_CLIENT:
module.fail_json(msg="This module requires NetworkManager glib API")
nmcli=Nmcli(module)
rc=None
out=''
err=''
result={}
result['conn_name']=nmcli.conn_name
result['state']=nmcli.state
# check for issues
if nmcli.conn_name is None:
nmcli.module.fail_json(msg="You haven't specified a name for the connection")
# team-slave checks
if nmcli.type=='team-slave' and nmcli.master is None:
nmcli.module.fail_json(msg="You haven't specified a name for the master so we're not changing a thing")
if nmcli.type=='team-slave' and nmcli.ifname is None:
nmcli.module.fail_json(msg="You haven't specified a name for the connection")
if nmcli.state=='absent':
if nmcli.connection_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err)=nmcli.down_connection()
(rc, out, err)=nmcli.remove_connection()
if rc!=0:
module.fail_json(name =('No Connection named %s exists' % nmcli.conn_name), msg=err, rc=rc)
elif nmcli.state=='present':
if nmcli.connection_exists():
# modify connection (note: this function is check mode aware)
# result['Connection']=('Connection %s of Type %s is not being added' % (nmcli.conn_name, nmcli.type))
result['Exists']='Connections do exist so we are modifying them'
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err)=nmcli.modify_connection()
if not nmcli.connection_exists():
result['Connection']=('Connection %s of Type %s is being added' % (nmcli.conn_name, nmcli.type))
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err)=nmcli.create_connection()
if rc is not None and rc!=0:
module.fail_json(name=nmcli.conn_name, msg=err, rc=rc)
if rc is None:
result['changed']=False
else:
result['changed']=True
if out:
result['stdout']=out
if err:
result['stderr']=err
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -1,055,855,326,051,997,400 | 33.900083 | 150 | 0.578862 | false |
noisemaster/AdamTestBot | future/builtins/newround.py | 2 | 3204 | """
``python-future``: pure Python implementation of Python 3 round().
"""
from future.utils import PYPY, PY26, bind_method
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if it exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
if PYPY:
# Work around issue #24: round() breaks on PyPy with NumPy's types
if 'numpy' in repr(type(number)):
number = float(number)
if not PY26:
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
else:
d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
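# Illustrative usage sketch (not part of the original module). Under Python 2
# with this backport, Banker's Rounding sends ties to the nearest even digit:
#
# >>> newround(2.5)
# 2
# >>> newround(3.5)
# 4
# >>> newround(2.675, 2) # 2.675 is stored as 2.67499..., so it rounds down
# 2.67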
### From Python 2.7's decimal.py. Only needed to support Py2.6:
def from_float_26(f):
"""Converts a float to a decimal number, exactly.
Note that Decimal.from_float(0.1) is not the same as Decimal('0.1').
Since 0.1 is not exactly representable in binary floating point, the
value is stored as the nearest representable value which is
0x1.999999999999ap-4. The exact equivalent of the value in decimal
is 0.1000000000000000055511151231257827021181583404541015625.
>>> Decimal.from_float(0.1)
Decimal('0.1000000000000000055511151231257827021181583404541015625')
>>> Decimal.from_float(float('nan'))
Decimal('NaN')
>>> Decimal.from_float(float('inf'))
Decimal('Infinity')
>>> Decimal.from_float(-float('inf'))
Decimal('-Infinity')
>>> Decimal.from_float(-0.0)
Decimal('-0')
"""
import math as _math
from decimal import _dec_from_triple # only available on Py2.6 and Py2.7 (not 3.3)
if isinstance(f, (int, long)): # handle integer inputs
return Decimal(f)
if _math.isinf(f) or _math.isnan(f): # raises TypeError if not a float
return Decimal(repr(f))
if _math.copysign(1.0, f) == 1.0:
sign = 0
else:
sign = 1
n, d = abs(f).as_integer_ratio()
# int.bit_length() method doesn't exist on Py2.6:
def bit_length(d):
if d != 0:
return len(bin(abs(d))) - 2
else:
return 0
k = bit_length(d) - 1
result = _dec_from_triple(sign, str(n*5**k), -k)
return result
__all__ = ['newround']
| mit | 569,651,341,855,183,740 | 30.363636 | 89 | 0.609863 | false |
glwu/python-for-android | python-build/python-libs/xmpppy/xmpp/roster.py | 203 | 9163 | ## roster.py
##
## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2, or (at your option)
## any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
# $Id: roster.py,v 1.20 2005/07/13 13:22:52 snakeru Exp $
"""
Simple roster implementation. Can also be used for tasks like
mass-renaming of contacts.
"""
from protocol import *
from client import PlugIn
class Roster(PlugIn):
""" Defines a plenty of methods that will allow you to manage roster.
Also automatically track presences from remote JIDs taking into
account that every JID can have multiple resources connected. Does not
currently support 'error' presences.
You can also use mapping interface for access to the internal representation of
contacts in roster.
"""
def __init__(self):
""" Init internal variables. """
PlugIn.__init__(self)
self.DBG_LINE='roster'
self._data = {}
self.set=None
self._exported_methods=[self.getRoster]
def plugin(self,owner,request=1):
""" Register presence and subscription trackers in the owner's dispatcher.
Also request roster from server if the 'request' argument is set.
Used internally."""
self._owner.RegisterHandler('iq',self.RosterIqHandler,'result',NS_ROSTER)
self._owner.RegisterHandler('iq',self.RosterIqHandler,'set',NS_ROSTER)
self._owner.RegisterHandler('presence',self.PresenceHandler)
if request: self.Request()
def Request(self,force=0):
""" Request roster from server if it were not yet requested
(or if the 'force' argument is set). """
if self.set is None: self.set=0
elif not force: return
self._owner.send(Iq('get',NS_ROSTER))
self.DEBUG('Roster requested from server','start')
def getRoster(self):
""" Requests roster from server if neccessary and returns self."""
if not self.set: self.Request()
while not self.set: self._owner.Process(10)
return self
def RosterIqHandler(self,dis,stanza):
""" Subscription tracker. Used internally for setting items state in
internal roster representation. """
for item in stanza.getTag('query').getTags('item'):
jid=item.getAttr('jid')
if item.getAttr('subscription')=='remove':
if self._data.has_key(jid): del self._data[jid]
raise NodeProcessed # a MUST
self.DEBUG('Setting roster item %s...'%jid,'ok')
if not self._data.has_key(jid): self._data[jid]={}
self._data[jid]['name']=item.getAttr('name')
self._data[jid]['ask']=item.getAttr('ask')
self._data[jid]['subscription']=item.getAttr('subscription')
self._data[jid]['groups']=[]
if not self._data[jid].has_key('resources'): self._data[jid]['resources']={}
for group in item.getTags('group'): self._data[jid]['groups'].append(group.getData())
self._data[self._owner.User+'@'+self._owner.Server]={'resources':{},'name':None,'ask':None,'subscription':None,'groups':None,}
self.set=1
raise NodeProcessed # a MUST. Otherwise you'll get back an <iq type='error'/>
def PresenceHandler(self,dis,pres):
""" Presence tracker. Used internally for setting items' resources state in
internal roster representation. """
jid=JID(pres.getFrom())
if not self._data.has_key(jid.getStripped()): self._data[jid.getStripped()]={'name':None,'ask':None,'subscription':'none','groups':['Not in roster'],'resources':{}}
item=self._data[jid.getStripped()]
typ=pres.getType()
if not typ:
self.DEBUG('Setting roster item %s for resource %s...'%(jid.getStripped(),jid.getResource()),'ok')
item['resources'][jid.getResource()]=res={'show':None,'status':None,'priority':'0','timestamp':None}
if pres.getTag('show'): res['show']=pres.getShow()
if pres.getTag('status'): res['status']=pres.getStatus()
if pres.getTag('priority'): res['priority']=pres.getPriority()
if not pres.getTimestamp(): pres.setTimestamp()
res['timestamp']=pres.getTimestamp()
elif typ=='unavailable' and item['resources'].has_key(jid.getResource()): del item['resources'][jid.getResource()]
# Need to handle type='error' also
def _getItemData(self,jid,dataname):
""" Return specific jid's representation in internal format. Used internally. """
jid=jid[:(jid+'/').find('/')]
return self._data[jid][dataname]
def _getResourceData(self,jid,dataname):
""" Return specific jid's resource representation in internal format. Used internally. """
if jid.find('/')+1:
jid,resource=jid.split('/',1)
if self._data[jid]['resources'].has_key(resource): return self._data[jid]['resources'][resource][dataname]
elif self._data[jid]['resources'].keys():
lastpri=-129
for r in self._data[jid]['resources'].keys():
if int(self._data[jid]['resources'][r]['priority'])>lastpri: resource,lastpri=r,int(self._data[jid]['resources'][r]['priority'])
return self._data[jid]['resources'][resource][dataname]
def delItem(self,jid):
""" Delete contact 'jid' from roster."""
self._owner.send(Iq('set',NS_ROSTER,payload=[Node('item',{'jid':jid,'subscription':'remove'})]))
def getAsk(self,jid):
""" Returns 'ask' value of contact 'jid'."""
return self._getItemData(jid,'ask')
def getGroups(self,jid):
""" Returns groups list that contact 'jid' belongs to."""
return self._getItemData(jid,'groups')
def getName(self,jid):
""" Returns name of contact 'jid'."""
return self._getItemData(jid,'name')
def getPriority(self,jid):
""" Returns priority of contact 'jid'. 'jid' should be a full (not bare) JID."""
return self._getResourceData(jid,'priority')
def getRawRoster(self):
""" Returns roster representation in internal format. """
return self._data
def getRawItem(self,jid):
""" Returns roster item 'jid' representation in internal format. """
return self._data[jid[:(jid+'/').find('/')]]
def getShow(self, jid):
""" Returns 'show' value of contact 'jid'. 'jid' should be a full (not bare) JID."""
return self._getResourceData(jid,'show')
def getStatus(self, jid):
""" Returns 'status' value of contact 'jid'. 'jid' should be a full (not bare) JID."""
return self._getResourceData(jid,'status')
def getSubscription(self,jid):
""" Returns 'subscription' value of contact 'jid'."""
return self._getItemData(jid,'subscription')
def getResources(self,jid):
""" Returns list of connected resources of contact 'jid'."""
return self._data[jid[:(jid+'/').find('/')]]['resources'].keys()
def setItem(self,jid,name=None,groups=[]):
""" Creates/renames contact 'jid' and sets the groups list that it now belongs to."""
iq=Iq('set',NS_ROSTER)
query=iq.getTag('query')
attrs={'jid':jid}
if name: attrs['name']=name
item=query.setTag('item',attrs)
for group in groups: item.addChild(node=Node('group',payload=[group]))
self._owner.send(iq)
def getItems(self):
""" Return list of all [bare] JIDs that the roster is currently tracks."""
return self._data.keys()
def keys(self):
""" Same as getItems. Provided for the sake of dictionary interface."""
return self._data.keys()
def __getitem__(self,item):
""" Get the contact in the internal format. Raises KeyError if JID 'item' is not in roster."""
return self._data[item]
def getItem(self,item):
""" Get the contact in the internal format (or None if JID 'item' is not in roster)."""
if self._data.has_key(item): return self._data[item]
def Subscribe(self,jid):
""" Send subscription request to JID 'jid'."""
self._owner.send(Presence(jid,'subscribe'))
def Unsubscribe(self,jid):
""" Ask for removing our subscription for JID 'jid'."""
self._owner.send(Presence(jid,'unsubscribe'))
def Authorize(self,jid):
""" Authorise JID 'jid'. Works only if these JID requested auth previously. """
self._owner.send(Presence(jid,'subscribed'))
def Unauthorize(self,jid):
""" Unauthorise JID 'jid'. Use for declining authorisation request
or for removing existing authorization. """
self._owner.send(Presence(jid,'unsubscribed'))
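# Illustrative usage sketch (an assumption, not part of the original module):
# given an already connected and authenticated xmpp.Client instance `cl`,
# the roster can be fetched and inspected like this:
#
# roster = cl.getRoster()
# for jid in roster.getItems():
# print jid, roster.getName(jid), roster.getSubscription(jid)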
| apache-2.0 | 1,597,796,118,777,687,600 | 48.798913 | 172 | 0.624141 | false |
Astrophilic/Algorithms_Example | Bitap Algorithm/Python/BiTap.py | 9 | 6159 | # -*- coding: utf-8 -*-
import sys
"""Auxiliary procedure for printing each item of row in columns in binary form
"""
def _printTable(t, size):
out = ""
for i in range(len(t)):
binaryForm = bin(t[i])
binaryForm = binaryForm[2 : ]
binaryForm = binaryForm.zfill(size)
out += binaryForm + ", "
out = out[ : -2]
print out
"""Bitap (Shift-Or) fuzzy searching algorithm with Wu-Manber modifications.
http://habrahabr.ru/post/114997/
http://habrahabr.ru/post/132128/
http://ru.wikipedia.org/wiki/Двоичный_алгоритм_поиска_подстроки
Search for needle (the pattern) in haystack (a real word from the text) with at most maxErrors alterations.
If maxErrors equals 0, execute precise searching only.
Return the approximate place of needle in haystack and the number of alterations.
If needle can't be found within maxErrors alterations, return a tuple of an empty string and -1.
"""
def bitapSearch(haystack, needle, maxErrors):
haystackLen = len(haystack)
needleLen = len(needle)
"""Genarating mask for each letter in haystack.
This mask shows presence letter in needle.
"""
def _generateAlphabet(needle, haystack):
alphabet = {}
for letter in haystack:
if letter not in alphabet:
letterPositionInNeedle = 0
for symbol in needle:
letterPositionInNeedle = letterPositionInNeedle << 1
letterPositionInNeedle |= int(letter != symbol)
alphabet[letter] = letterPositionInNeedle
return alphabet
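# Illustrative sketch: for needle "ab" and haystack "abc" this yields
# {'a': 0b01, 'b': 0b10, 'c': 0b11} - a 0 bit marks the positions in
# needle where the letter occurs.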
alphabet = _generateAlphabet(needle, haystack)
table = [] # first index - over k (errors count, numeration starts from 1), second - over columns (letters of haystack)
emptyColumn = (2 << (needleLen - 1)) - 1
# Generate underground level of table
underground = []
[underground.append(emptyColumn) for i in range(haystackLen + 1)]
table.append(underground)
_printTable(table[0], needleLen)
# Execute precise matching
k = 1
table.append([emptyColumn])
for columnNum in range(1, haystackLen + 1):
prevColumn = (table[k][columnNum - 1]) >> 1
letterPattern = alphabet[haystack[columnNum - 1]]
curColumn = prevColumn | letterPattern
table[k].append(curColumn)
if (curColumn & 0x1) == 0:
place = haystack[columnNum - needleLen : columnNum]
return (place, k - 1)
_printTable(table[k], needleLen)
# Execute fuzzy searching, calculating the Levenshtein distance
for k in range(2, maxErrors + 2):
print "Errors =", k - 1
table.append([emptyColumn])
for columnNum in range(1, haystackLen + 1):
prevColumn = (table[k][columnNum - 1]) >> 1
letterPattern = alphabet[haystack[columnNum - 1]]
curColumn = prevColumn | letterPattern
insertColumn = curColumn & (table[k - 1][columnNum - 1])
deleteColumn = curColumn & (table[k - 1][columnNum] >> 1)
replaceColumn = curColumn & (table[k - 1][columnNum - 1] >> 1)
resColumn = insertColumn & deleteColumn & replaceColumn
table[k].append(resColumn)
if (resColumn & 0x1) == 0:
startPos = max(0, columnNum - needleLen - 1) # taking in account Replace operation
endPos = min(columnNum + 1, haystackLen) # taking in account Replace operation
place = haystack[startPos : endPos]
return (place, k - 1)
_printTable(table[k], needleLen)
return ("", -1)
"""Highlight letters in fullWord, which concur with letters in pattern with same order.
wordPart - it's a part of fullWord, where matching with pattern letters will execute.
"""
class bitapHighlighter():
def __init__(self, fullWord, wordPart, pattern):
self._fullWord = fullWord
self._wordPart = wordPart
self._pattern = pattern
self._largestSequence = ""
"""Finding longest sequence of letters in word. Letters must have same order, as in pattern
"""
def _nextSequence(self, fromPatternPos, fromWordPos, prevSequence):
for patternPos in range(fromPatternPos, len(self._pattern)):
char = self._pattern[patternPos]
for wordPos in range(fromWordPos, len(self._wordPart)):
if char == self._wordPart[wordPos]:
sequence = prevSequence + char
self._nextSequence(patternPos + 1, wordPos + 1, sequence)
if len(self._largestSequence) < len(prevSequence):
self._largestSequence = prevSequence
"""Divide fullWord on parts: head, place(wordPart) and tail.
Select each letter of wordPart, which present in _largestSequence with <b></b> tags
Return gathered parts in one highlighted full word
"""
def _gatherFullWord(self):
placePos = self._fullWord.find(self._wordPart)
head = self._fullWord[0 : placePos]
tail = self._fullWord[placePos + len(self._wordPart) : ]
highlightedPlace = ""
for symbol in self._wordPart:
if symbol == self._largestSequence[0 : 1]:
highlightedPlace += "<b>" + symbol + "</b>"
self._largestSequence = self._largestSequence[1 : ]
else:
highlightedPlace += symbol
return head + highlightedPlace + tail
"""Run highlighting and return highlited word.
"""
def getHighlightedWord(self):
self._nextSequence(0, 0, "")
return self._gatherFullWord()
haystack = sys.argv[1]
needle = sys.argv[2]
errorsCount = sys.argv[3]
print "haystack = " + haystack + ". needle = " + needle + ". errorsCount = " + errorsCount
# Display letters of haystack in columns
out = ""
out = out.ljust(len(needle) + 2)
for i in range(len(haystack)):
out += haystack[i].ljust(len(needle)) + " "
out = out[ : -2]
print out
# Start bitap searching
(needlePlace, errors) = bitapSearch(haystack, needle, int(errorsCount))
print "Result of Bitap searching:", needlePlace, errors
print bitapHighlighter(haystack, needlePlace, needle).getHighlightedWord()
| apache-2.0 | -6,937,071,466,671,167,000 | 39.052288 | 123 | 0.634465 | false |
cyberark-bizdev/ansible | lib/ansible/modules/clustering/k8s/k8s_raw.py | 2 | 4093 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Chris Houseknecht <@chouseknecht>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: k8s_raw
short_description: Manage Kubernetes (K8s) objects
version_added: "2.5"
author: "Chris Houseknecht (@chouseknecht)"
description:
- Use the OpenShift Python client to perform CRUD operations on K8s objects.
- Pass the object definition from a source file or inline. See examples for reading
files and using Jinja templates.
- Access to the full range of K8s APIs.
- Authenticate using either a config file, certificates, password or token.
- Supports check mode.
extends_documentation_fragment:
- k8s_state_options
- k8s_name_options
- k8s_resource_options
- k8s_auth_options
requirements:
- "python >= 2.7"
- "openshift == 0.4.1"
- "PyYAML >= 3.11"
'''
EXAMPLES = '''
- name: Create a k8s namespace
k8s_raw:
name: testing
api_version: v1
kind: Namespace
state: present
- name: Create a Service object from an inline definition
k8s_raw:
state: present
definition:
apiVersion: v1
kind: Service
metadata:
name: web
namespace: testing
labels:
app: galaxy
service: web
spec:
selector:
app: galaxy
service: web
ports:
- protocol: TCP
targetPort: 8000
name: port-8000-tcp
port: 8000
- name: Create a Service object by reading the definition from a file
k8s_raw:
state: present
src: /testing/service.yml
- name: Get an existing Service object
k8s_raw:
api_version: v1
kind: Service
name: web
namespace: testing
register: web_service
- name: Get a list of all service objects
k8s_raw:
api_version: v1
kind: ServiceList
namespace: testing
register: service_list
- name: Remove an existing Service object
k8s_raw:
state: absent
api_version: v1
kind: Service
namespace: testing
name: web
# Passing the object definition from a file
- name: Create a Deployment by reading the definition from a local file
k8s_raw:
state: present
src: /testing/deployment.yml
- name: Read definition file from the Ansible controller file system
k8s_raw:
state: present
definition: "{{ lookup('file', '/testing/deployment.yml') | from_yaml }}"
- name: Read definition file from the Ansible controller file system after Jinja templating
k8s_raw:
state: present
definition: "{{ lookup('template', '/testing/deployment.yml') | from_yaml }}"
'''
RETURN = '''
result:
description:
- The created, patched, or otherwise present object. Will be empty in the case of a deletion.
returned: success
type: complex
contains:
api_version:
description: The versioned schema of this representation of an object.
returned: success
type: str
kind:
description: Represents the REST resource this object represents.
returned: success
type: str
metadata:
description: Standard object metadata. Includes name, namespace, annotations, labels, etc.
returned: success
type: complex
spec:
description: Specific attributes of the object. Will vary based on the I(api_version) and I(kind).
returned: success
type: complex
status:
description: Current status details for the object.
returned: success
type: complex
items:
description: Returned only when the I(kind) is a List type resource. Contains a set of objects.
returned: when resource is a List
type: list
'''
from ansible.module_utils.k8s.raw import KubernetesRawModule
def main():
KubernetesRawModule().execute_module()
if __name__ == '__main__':
main()
| gpl-3.0 | -321,503,115,781,719,200 | 24.110429 | 105 | 0.658441 | false |
LookThisCode/DeveloperBus | Season 2013/Brazil/Projects/QueroMe-master/openid/test/discoverdata.py | 87 | 4109 | """Module to make discovery data test cases available"""
import urlparse
import os.path
from openid.yadis.discover import DiscoveryResult, DiscoveryFailure
from openid.yadis.constants import YADIS_HEADER_NAME
tests_dir = os.path.dirname(__file__)
data_path = os.path.join(tests_dir, 'data')
testlist = [
# success, input_name, id_name, result_name
(True, "equiv", "equiv", "xrds"),
(True, "header", "header", "xrds"),
(True, "lowercase_header", "lowercase_header", "xrds"),
(True, "xrds", "xrds", "xrds"),
(True, "xrds_ctparam", "xrds_ctparam", "xrds_ctparam"),
(True, "xrds_ctcase", "xrds_ctcase", "xrds_ctcase"),
(False, "xrds_html", "xrds_html", "xrds_html"),
(True, "redir_equiv", "equiv", "xrds"),
(True, "redir_header", "header", "xrds"),
(True, "redir_xrds", "xrds", "xrds"),
(False, "redir_xrds_html", "xrds_html", "xrds_html"),
(True, "redir_redir_equiv", "equiv", "xrds"),
(False, "404_server_response", None, None),
(False, "404_with_header", None, None),
(False, "404_with_meta", None, None),
(False, "201_server_response", None, None),
(False, "500_server_response", None, None),
]
def getDataName(*components):
sanitized = []
for part in components:
if part in ['.', '..']:
raise ValueError
elif part:
sanitized.append(part)
if not sanitized:
raise ValueError
return os.path.join(data_path, *sanitized)
def getExampleXRDS():
filename = getDataName('example-xrds.xml')
return file(filename).read()
example_xrds = getExampleXRDS()
default_test_file = getDataName('test1-discover.txt')
discover_tests = {}
def readTests(filename):
data = file(filename).read()
tests = {}
for case in data.split('\f\n'):
(name, content) = case.split('\n', 1)
tests[name] = content
return tests
def getData(filename, name):
global discover_tests
try:
file_tests = discover_tests[filename]
except KeyError:
file_tests = discover_tests[filename] = readTests(filename)
return file_tests[name]
def fillTemplate(test_name, template, base_url, example_xrds):
mapping = [
('URL_BASE/', base_url),
('<XRDS Content>', example_xrds),
('YADIS_HEADER', YADIS_HEADER_NAME),
('NAME', test_name),
]
for k, v in mapping:
template = template.replace(k, v)
return template
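# Tiny illustrative call (the values are made up):
# fillTemplate('equiv', 'URL_BASE/page NAME', 'http://unittest/', '<xrds/>')
# # -> 'http://unittest/page equiv'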
def generateSample(test_name, base_url,
example_xrds=example_xrds,
filename=default_test_file):
try:
template = getData(filename, test_name)
except IOError, why:
import errno
if why[0] == errno.ENOENT:
raise KeyError(filename)
else:
raise
return fillTemplate(test_name, template, base_url, example_xrds)
def generateResult(base_url, input_name, id_name, result_name, success):
input_url = urlparse.urljoin(base_url, input_name)
# If the name is None then we expect the protocol to fail, which
# we represent by None
if id_name is None:
assert result_name is None
return input_url, DiscoveryFailure
result = generateSample(result_name, base_url)
headers, content = result.split('\n\n', 1)
header_lines = headers.split('\n')
for header_line in header_lines:
if header_line.startswith('Content-Type:'):
_, ctype = header_line.split(':', 1)
ctype = ctype.strip()
break
else:
ctype = None
id_url = urlparse.urljoin(base_url, id_name)
result = DiscoveryResult(input_url)
result.normalized_uri = id_url
if success:
result.xrds_uri = urlparse.urljoin(base_url, result_name)
result.content_type = ctype
result.response_text = content
return input_url, result
| apache-2.0 | -7,413,361,997,418,337,000 | 31.872 | 72 | 0.577269 | false |
urisimchoni/samba | python/samba/tests/dcerpc/string.py | 9 | 4393 | # Unix SMB/CIFS implementation.
# Copyright (C) Andrew Bartlett <[email protected]> 2016
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Tests for string and unicode handling in PIDL generated bindings
samba.dcerpc.*"""
from samba.dcerpc import drsblobs
import samba.tests
from samba.ndr import ndr_unpack, ndr_pack
import talloc
import gc
class TestException(Exception):
pass
class StringTests(samba.tests.TestCase):
def setUp(self):
super(StringTests, self).setUp()
talloc.enable_null_tracking()
self.startup_blocks = talloc.total_blocks()
def tearDown(self):
super(StringTests, self).tearDown()
gc.collect()
if talloc.total_blocks() != self.startup_blocks:
talloc.report_full()
self.fail("it appears we are leaking memory")
def test_string_from_python(self):
info = drsblobs.repsFromTo2OtherInfo()
info.dns_name1 = "hello.example.com"
info.dns_name2 = "goodbye.example.com"
gc.collect()
self.assertIsNotNone(info)
self.assertEqual(info.dns_name1, "hello.example.com")
self.assertEqual(info.dns_name2, "goodbye.example.com")
info.dns_name1 = ""
info.dns_name2 = "goodbye.example.com"
self.assertEqual(info.dns_name1, "")
self.assertEqual(info.dns_name2, "goodbye.example.com")
info.dns_name2 = None
self.assertEqual(info.dns_name1, "")
self.assertIsNone(info.dns_name2)
def test_string_with_exception(self):
try:
self.test_string_from_python()
raise TestException()
except TestException:
pass
def test_string_from_python_function(self):
def get_info():
info = drsblobs.repsFromTo2OtherInfo()
info.dns_name1 = "1.example.com"
info.dns_name2 = "2.example.com"
return info
info = get_info()
gc.collect()
self.assertIsNotNone(info)
self.assertEqual(info.dns_name1, "1.example.com")
self.assertEqual(info.dns_name2, "2.example.com")
def test_string_modify_in_place(self):
info = drsblobs.repsFromTo2OtherInfo()
info.dns_name1 = "1.example.com"
info.dns_name2 = "%s.example.com"
gc.collect()
self.assertIsNotNone(info)
self.assertEqual(info.dns_name1, "1.example.com")
self.assertEqual(info.dns_name2, "%s.example.com")
info.dns_name1 += ".co.nz"
info.dns_name2 %= 2
self.assertEqual(info.dns_name1, "1.example.com.co.nz")
self.assertEqual(info.dns_name2, "2.example.com")
del info
def test_string_delete(self):
gc.collect()
info = drsblobs.repsFromTo2OtherInfo()
info.dns_name1 = "1.example.com"
info.dns_name2 = "2.example.com"
info.dns_name1 = None
try:
del info.dns_name2
except AttributeError:
pass
self.assertIsNotNone(info)
self.assertIsNone(info.dns_name1)
self.assertIsNotNone(info.dns_name2)
class StringTestsWithoutLeakCheck(samba.tests.TestCase):
"""We know that the ndr unpacking test leaves stuff in the
autofree_context, and we don't want to worry about that. So for
this test we don't make memory leak assertions."""
def test_string_from_ndr(self):
info = drsblobs.repsFromTo2OtherInfo()
info.dns_name1 = "1.example.com"
info.dns_name2 = "2.example.com"
packed = ndr_pack(info)
gc.collect()
info_unpacked = ndr_unpack(drsblobs.repsFromTo2OtherInfo, packed)
self.assertIsNotNone(info_unpacked)
self.assertEqual(info_unpacked.dns_name1, "1.example.com")
self.assertEqual(info_unpacked.dns_name2, "2.example.com")
| gpl-3.0 | -789,884,531,257,083,000 | 32.030075 | 73 | 0.64967 | false |
brandsoulmates/incubator-airflow | airflow/utils/operator_helpers.py | 30 | 1546 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def context_to_airflow_vars(context):
"""
Given a context, this function provides a dictionary of values that can be used to
externally reconstruct relations between dags, dag_runs, tasks and task_instances.
:param context: The context for the task_instance of interest
:type context: dict
"""
params = dict()
dag = context.get('dag')
if dag and dag.dag_id:
params['airflow.ctx.dag.dag_id'] = dag.dag_id
dag_run = context.get('dag_run')
if dag_run and dag_run.execution_date:
params['airflow.ctx.dag_run.execution_date'] = dag_run.execution_date.isoformat()
task = context.get('task')
if task and task.task_id:
params['airflow.ctx.task.task_id'] = task.task_id
task_instance = context.get('task_instance')
if task_instance and task_instance.execution_date:
params['airflow.ctx.task_instance.execution_date'] = \
task_instance.execution_date.isoformat()
return params
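# Minimal illustrative sketch (not part of the original module); the context
# normally comes from Airflow itself, so the stand-in object below is purely
# an assumption to show the resulting mapping:
#
# class _Dag(object):
# dag_id = 'example_dag'
# context_to_airflow_vars({'dag': _Dag()})
# # -> {'airflow.ctx.dag.dag_id': 'example_dag'}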
| apache-2.0 | -3,152,516,698,229,415,000 | 38.641026 | 89 | 0.699871 | false |
demarle/VTK | Examples/Modelling/Python/SpherePuzzle.py | 14 | 3824 | #!/usr/bin/env python
# A game with VTK and Tkinter. :)
import Tkinter
import vtk
from vtk.tk.vtkTkRenderWindowInteractor import vtkTkRenderWindowInteractor
# Create the pipeline
puzzle = vtk.vtkSpherePuzzle()
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(puzzle.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
arrows = vtk.vtkSpherePuzzleArrows()
mapper2 = vtk.vtkPolyDataMapper()
mapper2.SetInputConnection(arrows.GetOutputPort())
actor2 = vtk.vtkActor()
actor2.SetMapper(mapper2)
renWin = vtk.vtkRenderWindow()
ren = vtk.vtkRenderer()
renWin.AddRenderer(ren)
# Add the actors to the renderer, set the background and size
ren.AddActor(actor)
ren.AddActor(actor2)
ren.SetBackground(0.1, 0.2, 0.4)
ren.ResetCamera()
cam = ren.GetActiveCamera()
cam.Elevation(-40)
## Generate the GUI
root = Tkinter.Tk()
root.withdraw()
# Define a quit method that exits cleanly.
def quit(obj=root):
obj.quit()
# Create the toplevel window
top = Tkinter.Toplevel(root)
top.title("Sphere Puzzle")
top.protocol("WM_DELETE_WINDOW", quit)
# Create some frames
f1 = Tkinter.Frame(top)
f2 = Tkinter.Frame(top)
f1.pack(side="top", anchor="n", expand=1, fill="both")
f2.pack(side="bottom", anchor="s", expand="t", fill="x")
# Create the Tk render widget, and bind the events
rw = vtkTkRenderWindowInteractor(f1, width=400, height=400, rw=renWin)
rw.pack(expand="t", fill="both")
def reset(evt=None):
puzzle.Reset()
renWin.Render()
# Display some information
l1 = Tkinter.Label(f2, text="Position cursor over the rotation plane.")
l2 = Tkinter.Label(f2, text="Moving pieces will be highlighted.")
l3 = Tkinter.Label(f2, text="Press 'm' to make a move.")
reset = Tkinter.Button(f2, text="Reset", command=reset)
b1 = Tkinter.Button(f2, text="Quit", command=quit)
for i in (l1, l2, l3, reset, b1):
i.pack(side="top", expand="t", fill="x")
# Done with the GUI. Create callback functions.
in_piece_rotation = 0
LastVal = None
# Highlight pieces
def MotionCallback(obj, event):
global in_piece_rotation
global LastVal
if in_piece_rotation:
return
iren = renWin.GetInteractor()
istyle = iren.GetInteractorStyle().GetCurrentStyle()
# Return if the user is performing interaction
if istyle.GetState():
return
# Get mouse position
pos = iren.GetEventPosition()
x, y = pos
# Get world point
ren.SetDisplayPoint(x, y, ren.GetZ(x, y))
ren.DisplayToWorld()
pt = ren.GetWorldPoint()
val = puzzle.SetPoint(pt[0], pt[1], pt[2])
if (not LastVal) or val != LastVal:
renWin.Render()
LastVal = val
# Rotate the puzzle
def CharCallback(obj, event):
iren = renWin.GetInteractor()
keycode = iren.GetKeyCode()
if keycode != "m" and keycode != "M":
return
pos = iren.GetEventPosition()
ButtonCallback(pos[0], pos[1])
def ButtonCallback(x, y):
global in_piece_rotation
if in_piece_rotation:
return
in_piece_rotation = 1
# Get world point
ren.SetDisplayPoint(x, y, ren.GetZ(x,y))
ren.DisplayToWorld()
pt = ren.GetWorldPoint()
x, y, z = pt[:3]
for i in range(0, 101, 10):
puzzle.SetPoint(x, y, z)
puzzle.MovePoint(i)
renWin.Render()
root.update()
in_piece_rotation = 0
root.update()
# Modify some bindings, use the interactor style 'switch'
iren = renWin.GetInteractor()
istyle = vtk.vtkInteractorStyleSwitch()
iren.SetInteractorStyle(istyle)
istyle.SetCurrentStyleToTrackballCamera()
iren.AddObserver("MouseMoveEvent", MotionCallback)
iren.AddObserver("CharEvent", CharCallback)
# Shuffle the puzzle
ButtonCallback(218, 195)
ButtonCallback(261, 128)
ButtonCallback(213, 107)
ButtonCallback(203, 162)
ButtonCallback(134, 186)
iren.Initialize()
renWin.Render()
iren.Start()
root.mainloop()
| bsd-3-clause | 3,958,530,549,741,020,000 | 22.460123 | 74 | 0.699006 | false |
koomik/CouchPotatoServer | libs/caper/__init__.py | 81 | 5426 | # Copyright 2013 Dean Gardiner <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from logr import Logr
from caper.matcher import FragmentMatcher
from caper.objects import CaperFragment, CaperClosure
from caper.parsers.anime import AnimeParser
from caper.parsers.scene import SceneParser
from caper.parsers.usenet import UsenetParser
__version_info__ = ('0', '3', '1')
__version_branch__ = 'master'
__version__ = "%s%s" % (
'.'.join(__version_info__),
'-' + __version_branch__ if __version_branch__ else ''
)
CL_START_CHARS = ['(', '[', '<', '>']
CL_END_CHARS = [')', ']', '<', '>']
CL_END_STRINGS = [' - ']
STRIP_START_CHARS = ''.join(CL_START_CHARS)
STRIP_END_CHARS = ''.join(CL_END_CHARS)
STRIP_CHARS = ''.join(['_', ' ', '.'])
FRAGMENT_SEPARATORS = ['.', '-', '_', ' ']
CL_START = 0
CL_END = 1
class Caper(object):
def __init__(self, debug=False):
self.debug = debug
self.parsers = {
'anime': AnimeParser,
'scene': SceneParser,
'usenet': UsenetParser
}
def _closure_split(self, name):
"""
:type name: str
:rtype: list of CaperClosure
"""
closures = []
def end_closure(closures, buf):
buf = buf.strip(STRIP_CHARS)
if len(buf) < 2:
return
cur = CaperClosure(len(closures), buf)
cur.left = closures[len(closures) - 1] if len(closures) > 0 else None
if cur.left:
cur.left.right = cur
closures.append(cur)
state = CL_START
buf = ""
for x, ch in enumerate(name):
# Check for start characters
if state == CL_START and ch in CL_START_CHARS:
end_closure(closures, buf)
state = CL_END
buf = ""
buf += ch
if state == CL_END and ch in CL_END_CHARS:
# End character found, create the closure
end_closure(closures, buf)
state = CL_START
buf = ""
elif state == CL_START and buf[-3:] in CL_END_STRINGS:
# End string found, create the closure
end_closure(closures, buf[:-3])
state = CL_START
buf = ""
end_closure(closures, buf)
return closures
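# Illustrative sketch: _closure_split('Name [x264]') yields two closures
# whose .value fields are 'Name' and '[x264]'.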
def _clean_closure(self, closure):
"""
:type closure: str
:rtype: str
"""
return closure.lstrip(STRIP_START_CHARS).rstrip(STRIP_END_CHARS)
def _fragment_split(self, closures):
"""
:type closures: list of CaperClosure
:rtype: list of CaperClosure
"""
cur_position = 0
cur = None
def end_fragment(fragments, cur, cur_position):
cur.position = cur_position
cur.left = fragments[len(fragments) - 1] if len(fragments) > 0 else None
if cur.left:
cur.left_sep = cur.left.right_sep
cur.left.right = cur
cur.right_sep = ch
fragments.append(cur)
for closure in closures:
closure.fragments = []
separator_buffer = ""
for x, ch in enumerate(self._clean_closure(closure.value)):
if not cur:
cur = CaperFragment(closure)
if ch in FRAGMENT_SEPARATORS:
if cur.value:
separator_buffer = ""
separator_buffer += ch
if cur.value or not closure.fragments:
end_fragment(closure.fragments, cur, cur_position)
elif len(separator_buffer) > 1:
cur.value = separator_buffer.strip()
if cur.value:
end_fragment(closure.fragments, cur, cur_position)
separator_buffer = ""
# Reset
cur = None
cur_position += 1
else:
cur.value += ch
# Finish parsing the last fragment
if cur and cur.value:
end_fragment(closure.fragments, cur, cur_position)
# Reset
cur_position = 0
cur = None
return closures
def parse(self, name, parser='scene'):
closures = self._closure_split(name)
closures = self._fragment_split(closures)
# Print closures
for closure in closures:
Logr.debug("closure [%s]", closure.value)
for fragment in closure.fragments:
Logr.debug("\tfragment [%s]", fragment.value)
if parser not in self.parsers:
raise ValueError("Unknown parser")
# TODO autodetect the parser type
return self.parsers[parser](self.debug).run(closures)
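# Illustrative usage (the release name is a made-up example):
# result = Caper().parse('Show.Name.S01E05.720p.HDTV.x264-GROUP')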
| gpl-3.0 | 4,125,948,053,443,144,000 | 26.825641 | 84 | 0.533727 | false |
TEAM-Gummy/platform_external_chromium_org | tools/telemetry/telemetry/core/backends/chrome/inspector_runtime_unittest.py | 25 | 1042 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core import exceptions
from telemetry.unittest import tab_test_case
class InspectorRuntimeTest(tab_test_case.TabTestCase):
def testRuntimeEvaluateSimple(self):
res = self._tab.EvaluateJavaScript('1+1')
assert res == 2
def testRuntimeEvaluateThatFails(self):
self.assertRaises(exceptions.EvaluateException,
lambda: self._tab.EvaluateJavaScript('fsdfsdfsf'))
def testRuntimeEvaluateOfSomethingThatCantJSONize(self):
def test():
self._tab.EvaluateJavaScript("""
var cur = {};
var root = {next: cur};
for (var i = 0; i < 1000; i++) {
next = {};
cur.next = next;
cur = next;
}
root;""")
self.assertRaises(exceptions.EvaluateException, test)
def testRuntimeExecuteOfSomethingThatCantJSONize(self):
self._tab.ExecuteJavaScript('window')
| bsd-3-clause | -1,642,055,858,081,244,400 | 32.612903 | 72 | 0.676583 | false |
MiltosD/CEFELRC | lib/python2.7/site-packages/django/middleware/http.py | 154 | 1696 | from django.core.exceptions import MiddlewareNotUsed
from django.utils.http import http_date, parse_http_date_safe
class ConditionalGetMiddleware(object):
"""
Handles conditional GET operations. If the response has a ETag or
Last-Modified header, and the request has If-None-Match or
If-Modified-Since, the response is replaced by an HttpNotModified.
Also sets the Date and Content-Length response-headers.
"""
def process_response(self, request, response):
response['Date'] = http_date()
if not response.has_header('Content-Length'):
response['Content-Length'] = str(len(response.content))
if response.has_header('ETag'):
if_none_match = request.META.get('HTTP_IF_NONE_MATCH')
if if_none_match == response['ETag']:
# Setting the status is enough here. The response handling path
# automatically removes content for this status code (in
# http.conditional_content_removal()).
response.status_code = 304
if response.has_header('Last-Modified'):
if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
if if_modified_since is not None:
if_modified_since = parse_http_date_safe(if_modified_since)
if if_modified_since is not None:
last_modified = parse_http_date_safe(response['Last-Modified'])
if last_modified is not None and last_modified <= if_modified_since:
# Setting the status code is enough here (same reasons as
# above).
response.status_code = 304
return response
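# A minimal sketch of enabling this middleware (Django settings of this era
# use MIDDLEWARE_CLASSES; the surrounding entries are illustrative):
#   MIDDLEWARE_CLASSES = (
#       'django.middleware.http.ConditionalGetMiddleware',
#       # ... other middleware ...
#   )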
| bsd-3-clause | -5,667,663,759,282,940,000 | 46.111111 | 84 | 0.629717 | false |
littley/pyvolution | internal/BreedingPool.py | 1 | 3508 | import random
import math
class BreedingPool(object):
"""
This class is a container for Chromosomes. Allows efficient selection of chromosomes to be "bred".
This class implements a binary tree
"""
class node():
"""
Each node represents the "value" of a single chromosome. Used to map a random number to a particular chromosome
"""
def __init__(self, minVal, maxVal, chromosome):
self.min = minVal
self.max = maxVal
self.chromosome = chromosome
self.left = None
self.right = None
def __eq__(self, other):
"""
            Given a floating-point number, a node compares equal ("==") to it
            when the number falls within the node's [min, max] range.
"""
if type(other) is type(self):
return self.min == other.min
return self.min <= other and self.max >= other
def __ne__(self, other):
if type(other) is type(self):
return self.min != other.min
return not self.__eq__(other)
def __lt__(self, other):
if type(other) is type(self):
return self.min < other.min
return self.max < other
def __gt__(self, other):
if type(other) is type(self):
return self.min > other.min
return self.min > other
def __le__(self, other):
if type(other) is type(self):
return self.min <= other.min
return self.__eq__(other) or self.__lt__(other)
def __ge__(self, other):
if type(other) is type(self):
return self.min >= other.min
            return self.__eq__(other) or self.__gt__(other)
def __init__(self, population):
allNodes = []
self.max = 0.0 #used to track the maximum value, used by random number generator
for chromosome in population:
increase = chromosome.getFitness()
allNodes.append(self.node(self.max, self.max + increase, chromosome))
self.max += increase
allNodes.sort()
self.root = self.makeTree(allNodes)
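        # Each chromosome now owns a contiguous [min, max] slice of
        # [0, self.max] whose width equals its fitness, so drawing a uniform
        # random number in get() implements fitness-proportionate
        # (roulette-wheel) selection with O(log n) tree lookups.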
def makeTree(self, pop):
"""
Given a sorted list of nodes, recursively construct a binary tree
:param pop: a sorted list of nodes
:return: the root of the tree
"""
if len(pop) == 0:
return None
elif len(pop) == 1:
return pop[0]
middleIndex = int(math.floor(len(pop) / 2))
leftList = pop[:middleIndex]
root = pop[middleIndex]
rightList = pop[middleIndex+1:]
root.left = self.makeTree(leftList)
root.right = self.makeTree(rightList)
return root
def findChromosome(self, n, target):
"""
Recursively search the tree for a chromosome
:param n: a node in the tree
:type n: node
:param target: look for a node that equals this target
:type target: float
:rtype: Chromosome
"""
if n is None:
return None
if n == target:
return n.chromosome
elif target < n:
return self.findChromosome(n.left, target)
elif target > n:
return self.findChromosome(n.right, target)
else:
return None
def get(self):
val = random.uniform(0, self.max)
        return self.findChromosome(self.root, val)
| apache-2.0 | 5,555,422,844,769,349,000 | 30.330357 | 120 | 0.540764 | false |
GodBlessPP/w16b_test | static/Brython3.1.3-20150514-095342/Lib/numbers.py | 883 | 10398 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for numbers, according to PEP 3141.
TODO: Fill out more detailed documentation on the operators."""
from abc import ABCMeta, abstractmethod
__all__ = ["Number", "Complex", "Real", "Rational", "Integral"]
class Number(metaclass=ABCMeta):
"""All numbers inherit from this class.
If you just want to check if an argument x is a number, without
caring what kind, use isinstance(x, Number).
"""
__slots__ = ()
# Concrete numeric types must provide their own hash implementation
__hash__ = None
## Notes on Decimal
## ----------------
## Decimal has all of the methods specified by the Real abc, but it should
## not be registered as a Real because decimals do not interoperate with
## binary floats (i.e. Decimal('3.14') + 2.71828 is undefined). But,
## abstract reals are expected to interoperate (i.e. R1 + R2 should be
## expected to work if R1 and R2 are both Reals).
class Complex(Number):
"""Complex defines the operations that work on the builtin complex type.
In short, those are: a conversion to complex, .real, .imag, +, -,
*, /, abs(), .conjugate, ==, and !=.
    If it is given heterogeneous arguments, and doesn't have special
knowledge about them, it should fall back to the builtin complex
type as described below.
"""
__slots__ = ()
@abstractmethod
def __complex__(self):
"""Return a builtin complex instance. Called for complex(self)."""
def __bool__(self):
"""True if self != 0. Called for bool(self)."""
return self != 0
@property
@abstractmethod
def real(self):
"""Retrieve the real component of this number.
This should subclass Real.
"""
raise NotImplementedError
@property
@abstractmethod
def imag(self):
"""Retrieve the imaginary component of this number.
This should subclass Real.
"""
raise NotImplementedError
@abstractmethod
def __add__(self, other):
"""self + other"""
raise NotImplementedError
@abstractmethod
def __radd__(self, other):
"""other + self"""
raise NotImplementedError
@abstractmethod
def __neg__(self):
"""-self"""
raise NotImplementedError
@abstractmethod
def __pos__(self):
"""+self"""
raise NotImplementedError
def __sub__(self, other):
"""self - other"""
return self + -other
def __rsub__(self, other):
"""other - self"""
return -self + other
@abstractmethod
def __mul__(self, other):
"""self * other"""
raise NotImplementedError
@abstractmethod
def __rmul__(self, other):
"""other * self"""
raise NotImplementedError
@abstractmethod
def __truediv__(self, other):
"""self / other: Should promote to float when necessary."""
raise NotImplementedError
@abstractmethod
def __rtruediv__(self, other):
"""other / self"""
raise NotImplementedError
@abstractmethod
def __pow__(self, exponent):
"""self**exponent; should promote to float or complex when necessary."""
raise NotImplementedError
@abstractmethod
def __rpow__(self, base):
"""base ** self"""
raise NotImplementedError
@abstractmethod
def __abs__(self):
"""Returns the Real distance from 0. Called for abs(self)."""
raise NotImplementedError
@abstractmethod
def conjugate(self):
"""(x+y*i).conjugate() returns (x-y*i)."""
raise NotImplementedError
@abstractmethod
def __eq__(self, other):
"""self == other"""
raise NotImplementedError
def __ne__(self, other):
"""self != other"""
# The default __ne__ doesn't negate __eq__ until 3.0.
return not (self == other)
Complex.register(complex)
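# For example, once registered the builtin complex satisfies this ABC:
#   >>> isinstance(1j, Complex)
#   True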
class Real(Complex):
"""To Complex, Real adds the operations that work on real numbers.
In short, those are: a conversion to float, trunc(), divmod,
%, <, <=, >, and >=.
Real also provides defaults for the derived operations.
"""
__slots__ = ()
@abstractmethod
def __float__(self):
"""Any Real can be converted to a native float object.
Called for float(self)."""
raise NotImplementedError
@abstractmethod
def __trunc__(self):
"""trunc(self): Truncates self to an Integral.
Returns an Integral i such that:
* i>0 iff self>0;
* abs(i) <= abs(self);
* for any Integral j satisfying the first two conditions,
abs(i) >= abs(j) [i.e. i has "maximal" abs among those].
i.e. "truncate towards 0".
"""
raise NotImplementedError
@abstractmethod
def __floor__(self):
"""Finds the greatest Integral <= self."""
raise NotImplementedError
@abstractmethod
def __ceil__(self):
"""Finds the least Integral >= self."""
raise NotImplementedError
@abstractmethod
def __round__(self, ndigits=None):
"""Rounds self to ndigits decimal places, defaulting to 0.
If ndigits is omitted or None, returns an Integral, otherwise
returns a Real. Rounds half toward even.
"""
raise NotImplementedError
def __divmod__(self, other):
"""divmod(self, other): The pair (self // other, self % other).
Sometimes this can be computed faster than the pair of
operations.
"""
return (self // other, self % other)
def __rdivmod__(self, other):
"""divmod(other, self): The pair (self // other, self % other).
Sometimes this can be computed faster than the pair of
operations.
"""
return (other // self, other % self)
@abstractmethod
def __floordiv__(self, other):
"""self // other: The floor() of self/other."""
raise NotImplementedError
@abstractmethod
def __rfloordiv__(self, other):
"""other // self: The floor() of other/self."""
raise NotImplementedError
@abstractmethod
def __mod__(self, other):
"""self % other"""
raise NotImplementedError
@abstractmethod
def __rmod__(self, other):
"""other % self"""
raise NotImplementedError
@abstractmethod
def __lt__(self, other):
"""self < other
< on Reals defines a total ordering, except perhaps for NaN."""
raise NotImplementedError
@abstractmethod
def __le__(self, other):
"""self <= other"""
raise NotImplementedError
# Concrete implementations of Complex abstract methods.
def __complex__(self):
"""complex(self) == complex(float(self), 0)"""
return complex(float(self))
@property
def real(self):
"""Real numbers are their real component."""
return +self
@property
def imag(self):
"""Real numbers have no imaginary component."""
return 0
def conjugate(self):
"""Conjugate is a no-op for Reals."""
return +self
Real.register(float)
class Rational(Real):
""".numerator and .denominator should be in lowest terms."""
__slots__ = ()
@property
@abstractmethod
def numerator(self):
raise NotImplementedError
@property
@abstractmethod
def denominator(self):
raise NotImplementedError
# Concrete implementation of Real's conversion to float.
def __float__(self):
"""float(self) = self.numerator / self.denominator
It's important that this conversion use the integer's "true"
division rather than casting one side to float before dividing
so that ratios of huge integers convert without overflowing.
"""
return self.numerator / self.denominator
class Integral(Rational):
"""Integral adds a conversion to int and the bit-string operations."""
__slots__ = ()
@abstractmethod
def __int__(self):
"""int(self)"""
raise NotImplementedError
def __index__(self):
"""Called whenever an index is needed, such as in slicing"""
return int(self)
@abstractmethod
def __pow__(self, exponent, modulus=None):
"""self ** exponent % modulus, but maybe faster.
Accept the modulus argument if you want to support the
3-argument version of pow(). Raise a TypeError if exponent < 0
or any argument isn't Integral. Otherwise, just implement the
2-argument version described in Complex.
"""
raise NotImplementedError
@abstractmethod
def __lshift__(self, other):
"""self << other"""
raise NotImplementedError
@abstractmethod
def __rlshift__(self, other):
"""other << self"""
raise NotImplementedError
@abstractmethod
def __rshift__(self, other):
"""self >> other"""
raise NotImplementedError
@abstractmethod
def __rrshift__(self, other):
"""other >> self"""
raise NotImplementedError
@abstractmethod
def __and__(self, other):
"""self & other"""
raise NotImplementedError
@abstractmethod
def __rand__(self, other):
"""other & self"""
raise NotImplementedError
@abstractmethod
def __xor__(self, other):
"""self ^ other"""
raise NotImplementedError
@abstractmethod
def __rxor__(self, other):
"""other ^ self"""
raise NotImplementedError
@abstractmethod
def __or__(self, other):
"""self | other"""
raise NotImplementedError
@abstractmethod
def __ror__(self, other):
"""other | self"""
raise NotImplementedError
@abstractmethod
def __invert__(self):
"""~self"""
raise NotImplementedError
# Concrete implementations of Rational and Real abstract methods.
def __float__(self):
"""float(self) == float(int(self))"""
return float(int(self))
@property
def numerator(self):
"""Integers are their own numerators."""
return +self
@property
def denominator(self):
"""Integers have a denominator of 1."""
return 1
Integral.register(int)
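# Doctest-style illustration of the resulting numeric tower:
#   >>> isinstance(3, Integral), isinstance(3, Real)
#   (True, True)
#   >>> isinstance(1.5, Real), isinstance(1.5, Integral)
#   (True, False)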
| agpl-3.0 | -1,816,912,727,000,814,600 | 25.390863 | 80 | 0.597711 | false |
kw217/omim | 3party/protobuf/python/google/protobuf/internal/message_python_test.py | 74 | 2359 | #! /usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for ..public.message for the pure Python implementation."""
import os
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'
# We must set the implementation version above before the google3 imports.
# pylint: disable=g-import-not-at-top
from google.apputils import basetest
from google.protobuf.internal import api_implementation
# Run all tests from the original module by putting them in our namespace.
# pylint: disable=wildcard-import
from google.protobuf.internal.message_test import *
class ConfirmPurePythonTest(basetest.TestCase):
def testImplementationSetting(self):
self.assertEqual('python', api_implementation.Type())
if __name__ == '__main__':
basetest.main()
| apache-2.0 | 7,464,430,236,552,387,000 | 42.685185 | 74 | 0.778296 | false |
nekulin/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/distutils/cygwinccompiler.py | 50 | 17299 | """distutils.cygwinccompiler
Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
handles the Cygwin port of the GNU C compiler to Windows. It also contains
the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
cygwin in no-cygwin mode).
"""
# problems:
#
# * if you use a msvc compiled python version (1.5.2)
# 1. you have to insert a __GNUC__ section in its config.h
# 2. you have to generate a import library for its dll
# - create a def-file for python??.dll
# - create a import library using
# dlltool --dllname python15.dll --def python15.def \
# --output-lib libpython15.a
#
# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
#
# * We put export_symbols in a def-file, and don't use
#   --export-all-symbols because it didn't work reliably in some
#   tested configurations. And because other Windows compilers also
#   need their symbols specified, this is no serious problem.
#
# tested configurations:
#
# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
# (after patching python's config.h and for C++ some other include files)
# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
# (ld doesn't support -shared, so we use dllwrap)
# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
# - its dllwrap doesn't work, there is a bug in binutils 2.10.90
# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
#   - using gcc -mdll instead of dllwrap doesn't work without -static because
#     it tries to link against dlls instead of their import libraries. (If
#     it finds the dll first.)
#     By specifying -static we force ld to link against the import libraries;
#     this is the Windows standard, and the dlls normally do not contain the
#     necessary symbols anyway.
# *** only the version of June 2000 shows these problems
# * cygwin gcc 3.2/ld 2.13.90 works
# (ld supports -shared)
# * mingw gcc 3.2/ld 2.13 works
# (ld supports -shared)
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: cygwinccompiler.py 65834 2008-08-18 19:23:47Z amaury.forgeotdarc $"
import os,sys,copy
from distutils.ccompiler import gen_preprocess_options, gen_lib_options
from distutils.unixccompiler import UnixCCompiler
from distutils.file_util import write_file
from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
from distutils import log
def get_msvcr():
"""Include the appropriate MSVC runtime library if Python was built
with MSVC 7.0 or later.
"""
msc_pos = sys.version.find('MSC v.')
if msc_pos != -1:
msc_ver = sys.version[msc_pos+6:msc_pos+10]
if msc_ver == '1300':
# MSVC 7.0
return ['msvcr70']
elif msc_ver == '1310':
# MSVC 7.1
return ['msvcr71']
elif msc_ver == '1400':
# VS2005 / MSVC 8.0
return ['msvcr80']
elif msc_ver == '1500':
# VS2008 / MSVC 9.0
return ['msvcr90']
else:
        raise ValueError("Unknown MS Compiler version %s " % msc_ver)
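# For example, a Python whose sys.version contains "MSC v.1500" (VS2008)
# makes get_msvcr() return ['msvcr90'] (illustrative; the result depends on
# the interpreter build).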
class CygwinCCompiler (UnixCCompiler):
compiler_type = 'cygwin'
obj_extension = ".o"
static_lib_extension = ".a"
shared_lib_extension = ".dll"
static_lib_format = "lib%s%s"
shared_lib_format = "%s%s"
exe_extension = ".exe"
def __init__ (self, verbose=0, dry_run=0, force=0):
UnixCCompiler.__init__ (self, verbose, dry_run, force)
(status, details) = check_config_h()
self.debug_print("Python's GCC status: %s (details: %s)" %
(status, details))
if status is not CONFIG_H_OK:
self.warn(
"Python's pyconfig.h doesn't seem to support your compiler. "
"Reason: %s. "
"Compiling may fail because of undefined preprocessor macros."
% details)
self.gcc_version, self.ld_version, self.dllwrap_version = \
get_versions()
self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" %
(self.gcc_version,
self.ld_version,
self.dllwrap_version) )
# ld_version >= "2.10.90" and < "2.13" should also be able to use
# gcc -mdll instead of dllwrap
# Older dllwraps had own version numbers, newer ones use the
# same as the rest of binutils ( also ld )
# dllwrap 2.10.90 is buggy
if self.ld_version >= "2.10.90":
self.linker_dll = "gcc"
else:
self.linker_dll = "dllwrap"
# ld_version >= "2.13" support -shared so use it instead of
# -mdll -static
if self.ld_version >= "2.13":
shared_option = "-shared"
else:
shared_option = "-mdll -static"
# Hard-code GCC because that's what this is all about.
# XXX optimization, warnings etc. should be customizable.
self.set_executables(compiler='gcc -mcygwin -O -Wall',
compiler_so='gcc -mcygwin -mdll -O -Wall',
compiler_cxx='g++ -mcygwin -O -Wall',
linker_exe='gcc -mcygwin',
linker_so=('%s -mcygwin %s' %
(self.linker_dll, shared_option)))
# cygwin and mingw32 need different sets of libraries
if self.gcc_version == "2.91.57":
# cygwin shouldn't need msvcrt, but without the dlls will crash
# (gcc version 2.91.57) -- perhaps something about initialization
self.dll_libraries=["msvcrt"]
self.warn(
"Consider upgrading to a newer version of gcc")
else:
# Include the appropriate MSVC runtime library if Python was built
# with MSVC 7.0 or later.
self.dll_libraries = get_msvcr()
# __init__ ()
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
if ext == '.rc' or ext == '.res':
# gcc needs '.res' and '.rc' compiled to object files !!!
try:
self.spawn(["windres", "-i", src, "-o", obj])
except DistutilsExecError, msg:
raise CompileError, msg
else: # for other files use the C-compiler
try:
self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
extra_postargs)
except DistutilsExecError, msg:
raise CompileError, msg
def link (self,
target_desc,
objects,
output_filename,
output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None):
# use separate copies, so we can modify the lists
extra_preargs = copy.copy(extra_preargs or [])
libraries = copy.copy(libraries or [])
objects = copy.copy(objects or [])
# Additional libraries
libraries.extend(self.dll_libraries)
# handle export symbols by creating a def-file
# with executables this only works with gcc/ld as linker
if ((export_symbols is not None) and
(target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
# (The linker doesn't do anything if output is up-to-date.
            # So it would probably be better to check if we really need this,
# but for this we had to insert some unchanged parts of
# UnixCCompiler, and this is not what we want.)
# we want to put some files in the same directory as the
# object files are, build_temp doesn't help much
# where are the object files
temp_dir = os.path.dirname(objects[0])
# name of dll to give the helper files the same base name
(dll_name, dll_extension) = os.path.splitext(
os.path.basename(output_filename))
# generate the filenames for these files
def_file = os.path.join(temp_dir, dll_name + ".def")
lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")
# Generate .def file
contents = [
"LIBRARY %s" % os.path.basename(output_filename),
"EXPORTS"]
for sym in export_symbols:
contents.append(sym)
self.execute(write_file, (def_file, contents),
"writing %s" % def_file)
# next add options for def-file and to creating import libraries
# dllwrap uses different options than gcc/ld
if self.linker_dll == "dllwrap":
extra_preargs.extend(["--output-lib", lib_file])
# for dllwrap we have to use a special option
extra_preargs.extend(["--def", def_file])
# we use gcc/ld here and can be sure ld is >= 2.9.10
else:
# doesn't work: bfd_close build\...\libfoo.a: Invalid operation
#extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file])
# for gcc/ld the def-file is specified as any object files
objects.append(def_file)
#end: if ((export_symbols is not None) and
# (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
# who wants symbols and a many times larger output file
# should explicitly switch the debug mode on
# otherwise we let dllwrap/ld strip the output file
# (On my machine: 10KB < stripped_file < ??100KB
# unstripped_file = stripped_file + XXX KB
# ( XXX=254 for a typical python extension))
if not debug:
extra_preargs.append("-s")
UnixCCompiler.link(self,
target_desc,
objects,
output_filename,
output_dir,
libraries,
library_dirs,
runtime_library_dirs,
None, # export_symbols, we do this in our def-file
debug,
extra_preargs,
extra_postargs,
build_temp,
target_lang)
# link ()
# -- Miscellaneous methods -----------------------------------------
# overwrite the one from CCompiler to support rc and res-files
def object_filenames (self,
source_filenames,
strip_dir=0,
output_dir=''):
if output_dir is None: output_dir = ''
obj_names = []
for src_name in source_filenames:
# use normcase to make sure '.rc' is really '.rc' and not '.RC'
(base, ext) = os.path.splitext (os.path.normcase(src_name))
if ext not in (self.src_extensions + ['.rc','.res']):
raise UnknownFileError, \
"unknown file type '%s' (from '%s')" % \
(ext, src_name)
if strip_dir:
base = os.path.basename (base)
if ext == '.res' or ext == '.rc':
# these need to be compiled to object files
obj_names.append (os.path.join (output_dir,
base + ext + self.obj_extension))
else:
obj_names.append (os.path.join (output_dir,
base + self.obj_extension))
return obj_names
# object_filenames ()
# class CygwinCCompiler
# the same as cygwin plus some additional parameters
class Mingw32CCompiler (CygwinCCompiler):
compiler_type = 'mingw32'
def __init__ (self,
verbose=0,
dry_run=0,
force=0):
CygwinCCompiler.__init__ (self, verbose, dry_run, force)
# ld_version >= "2.13" support -shared so use it instead of
# -mdll -static
if self.ld_version >= "2.13":
shared_option = "-shared"
else:
shared_option = "-mdll -static"
# A real mingw32 doesn't need to specify a different entry point,
# but cygwin 2.91.57 in no-cygwin-mode needs it.
if self.gcc_version <= "2.91.57":
entry_point = '--entry _DllMain@12'
else:
entry_point = ''
self.set_executables(compiler='gcc -mno-cygwin -O -Wall',
compiler_so='gcc -mno-cygwin -mdll -O -Wall',
compiler_cxx='g++ -mno-cygwin -O -Wall',
linker_exe='gcc -mno-cygwin',
linker_so='%s -mno-cygwin %s %s'
% (self.linker_dll, shared_option,
entry_point))
# Maybe we should also append -mthreads, but then the finished
# dlls need another dll (mingwm10.dll see Mingw32 docs)
# (-mthreads: Support thread-safe exception handling on `Mingw32')
# no additional libraries needed
self.dll_libraries=[]
# Include the appropriate MSVC runtime library if Python was built
# with MSVC 7.0 or later.
self.dll_libraries = get_msvcr()
# __init__ ()
# class Mingw32CCompiler
# Because these compilers aren't configured in Python's pyconfig.h file by
# default, we should at least warn the user if he is using a unmodified
# version.
CONFIG_H_OK = "ok"
CONFIG_H_NOTOK = "not ok"
CONFIG_H_UNCERTAIN = "uncertain"
def check_config_h():
"""Check if the current Python installation (specifically, pyconfig.h)
appears amenable to building extensions with GCC. Returns a tuple
(status, details), where 'status' is one of the following constants:
CONFIG_H_OK
all is well, go ahead and compile
CONFIG_H_NOTOK
doesn't look good
CONFIG_H_UNCERTAIN
not sure -- unable to read pyconfig.h
'details' is a human-readable string explaining the situation.
Note there are two ways to conclude "OK": either 'sys.version' contains
the string "GCC" (implying that this Python was built with GCC), or the
installed "pyconfig.h" contains the string "__GNUC__".
"""
# XXX since this function also checks sys.version, it's not strictly a
# "pyconfig.h" check -- should probably be renamed...
from distutils import sysconfig
import string
# if sys.version contains GCC then python was compiled with
# GCC, and the pyconfig.h file should be OK
if string.find(sys.version,"GCC") >= 0:
return (CONFIG_H_OK, "sys.version mentions 'GCC'")
fn = sysconfig.get_config_h_filename()
try:
        # It would probably be better to read single lines to search.
# But we do this only once, and it is fast enough
f = open(fn)
s = f.read()
f.close()
except IOError, exc:
# if we can't read this file, we cannot say it is wrong
# the compiler will complain later about this file as missing
return (CONFIG_H_UNCERTAIN,
"couldn't read '%s': %s" % (fn, exc.strerror))
else:
# "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
if string.find(s,"__GNUC__") >= 0:
return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
else:
return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
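# Typical outcomes (illustrative): a GCC-built Python short-circuits to
# (CONFIG_H_OK, "sys.version mentions 'GCC'"), while an unmodified MSVC
# build usually yields CONFIG_H_NOTOK with the pyconfig.h path in 'details'.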
def get_versions():
""" Try to find out the versions of gcc, ld and dllwrap.
If not possible it returns None for it.
"""
from distutils.version import LooseVersion
from distutils.spawn import find_executable
import re
gcc_exe = find_executable('gcc')
if gcc_exe:
out = os.popen(gcc_exe + ' -dumpversion','r')
out_string = out.read()
out.close()
result = re.search('(\d+\.\d+(\.\d+)*)',out_string)
if result:
gcc_version = LooseVersion(result.group(1))
else:
gcc_version = None
else:
gcc_version = None
ld_exe = find_executable('ld')
if ld_exe:
out = os.popen(ld_exe + ' -v','r')
out_string = out.read()
out.close()
result = re.search('(\d+\.\d+(\.\d+)*)',out_string)
if result:
ld_version = LooseVersion(result.group(1))
else:
ld_version = None
else:
ld_version = None
dllwrap_exe = find_executable('dllwrap')
if dllwrap_exe:
out = os.popen(dllwrap_exe + ' --version','r')
out_string = out.read()
out.close()
result = re.search(' (\d+\.\d+(\.\d+)*)',out_string)
if result:
dllwrap_version = LooseVersion(result.group(1))
else:
dllwrap_version = None
else:
dllwrap_version = None
return (gcc_version, ld_version, dllwrap_version)
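# Illustrative call (results depend entirely on the local toolchain; None is
# returned for any tool that is missing):
#   >>> get_versions()
#   (LooseVersion ('2.95.2'), LooseVersion ('2.9.4'), LooseVersion ('0.2.4'))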
| apache-2.0 | 1,542,957,634,429,855,000 | 37.700224 | 88 | 0.558876 | false |