repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, ⌀ = null)
---|---|---|---|---|
shannpersand/cooper-type | refs/heads/master | workshops/Python Workshop/Just/2016-06-21 Cooper workshop day 2/16 type specimen columns.py | 1 | txt = """Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.
Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim assum. Typi non habent claritatem insitam; est usus legentis in iis qui facit eorum claritatem. Investigationes demonstraverunt lectores legere me lius quod ii legunt saepius. Claritas est etiam processus dynamicus, qui sequitur mutationem consuetudium lectorum.
Mirum est notare quam littera gothica, quam nunc putamus parum claram, anteposuerit litterarum formas humanitatis per seacula quarta decima et quinta decima. Eodem modo typi, qui nunc nobis videntur parum clari, fiant sollemnes in futurum."""
fontList = installedFonts()
# print len(fontList)
margin = 80
gutter = 10
numColumns = 2
for fontName in fontList[400:420]:
newPage("A4")
font(fontName)
fontSize(20)
text(fontName, (margin, height() - margin))
fontSize(14)
hyphenation(True)
columnWidth = (width() - 2 * margin - gutter * (numColumns - 1)) / numColumns
columnHeight = height() - 300
copy = txt
for i in range(numColumns):
x = margin + i * (columnWidth + gutter)
y = margin
fill(0.9)
rect(x, y, columnWidth, columnHeight)
fill(0)
copy = copy.lstrip("\n") # strip any leading blank lines
copy = textBox(copy, (x, y, columnWidth, columnHeight))
font("Curlz MT")
fontSize(14)
text(str(pageCount()), (margin, margin/2))
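    # A worked example of the column arithmetic above (illustrative; DrawBot's
    # "A4" page is about 595 x 842 points):
    #   columnWidth = (595 - 2 * 80 - 10 * (2 - 1)) / 2 = 212.5 points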
|
nsoranzo/tools-iuc | refs/heads/master | tools/qiime/qiime_core/beta_diversity_through_plots_html_generation.py | 21 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import re
def generate_index_html(dir_list, args):
with open(args.html_file, 'w') as index_html_file:
s = ""
s += '<html>\n'
s += '\t<head><title>PCoA beta diversity results</title></head>\n'
s += '\t<body>\n'
s += '\t\t<a href="http://www.qiime.org" target="_blank">'
s += '<img src="http://qiime.org/_static/wordpressheader.png" '
s += 'alt="www.qiime.org""/></a>\n'
s += '\t\t<p>\n'
s += '\t\t\tBeta diversity metrics\n'
s += '\t\t\t<ul>\n'
for directory in dir_list:
regexp_result = re.search(
r'([a-zA-Z\_]*)_emperor_pcoa_plot',
directory)
metric = regexp_result.group(1)
s += '\t\t\t\t<li>' + metric + ': '
s += '<a href="' + directory
            s += '/index.html">PCoA results</a>\n'
s += '\t\t\t\t</li>\n'
s += '\t\t\t</ul>\n'
s += '\t\t</p>\n'
s += '\t</body>\n'
s += '</html>\n'
index_html_file.write(s)
def build_html(args):
os.mkdir(args.html_dir)
dir_list = [name for name in os.listdir(args.data_directory)
if os.path.isdir(os.path.join(
args.data_directory,
name))]
generate_index_html(dir_list, args)
for directory in dir_list:
input_path = os.path.join(args.data_directory, directory)
cmd = 'cp -r ' + input_path + ' ' + args.html_dir
os.system(cmd)
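        # Note: building the shell command by string concatenation breaks on
        # paths containing spaces. A minimal sketch of a safer, behaviorally
        # equivalent copy using only the standard library (assuming the
        # destination subdirectory does not exist yet):
        #   import shutil
        #   shutil.copytree(input_path, os.path.join(args.html_dir, directory))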
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--data_directory', required=True)
parser.add_argument('--html_file', required=True)
parser.add_argument('--html_dir', required=True)
args = parser.parse_args()
build_html(args)
|
obi-two/Rebelion | refs/heads/master | data/scripts/templates/object/tangible/component/weapon/shared_blaster_barrel_wp_muzzle_sm_s01_lg.py | 2 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/component/weapon/shared_blaster_barrel_wp_muzzle_sm_s01_lg.iff"
result.attribute_template_id = -1
result.stfName("craft_weapon_ingredients_n","blaster_barrel_sm_t1")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result |
kaulkie/keyczar | refs/heads/master | cpp/src/tools/scons/scons-local-1.2.0.d20090223/SCons/Tool/msvs.py | 19 | """SCons.Tool.msvs
Tool-specific initialization for Microsoft Visual Studio project files.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/msvs.py 4043 2009/02/23 09:06:45 scons"
import base64
import hashlib
import ntpath
import os
import pickle
import re
import string
import sys
import SCons.Builder
import SCons.Node.FS
import SCons.Platform.win32
import SCons.Script.SConscript
import SCons.Util
import SCons.Warnings
from MSCommon import detect_msvs, merge_default_version
##############################################################################
# Below here are the classes and functions for generation of
# DSP/DSW/SLN/VCPROJ files.
##############################################################################
def _hexdigest(s):
"""Return a string as a string of hex characters.
"""
# NOTE: This routine is a method in the Python 2.0 interface
# of the native md5 module, but we want SCons to operate all
# the way back to at least Python 1.5.2, which doesn't have it.
h = string.hexdigits
r = ''
for c in s:
i = ord(c)
r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
return r
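# Illustrative: _hexdigest('\x1a\x2b') returns '1a2b'; string.hexdigits is
# lower-case first, so callers that want upper-case (see _generateGUID below)
# convert the result themselves.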
def xmlify(s):
s = string.replace(s, "&", "&") # do this first
s = string.replace(s, "'", "'")
s = string.replace(s, '"', """)
return s
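# e.g. xmlify('a & "b"') -> 'a &amp; &quot;b&quot;' -- the ampersand is
# replaced first so the entities introduced for the quotes are not
# double-escaped.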
external_makefile_guid = '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}'
def _generateGUID(slnfile, name):
"""This generates a dummy GUID for the sln file to use. It is
based on the MD5 signatures of the sln filename plus the name of
the project. It basically just needs to be unique, and not
change with each invocation."""
m = hashlib.md5()
# Normalize the slnfile path to a Windows path (\ separators) so
# the generated file has a consistent GUID even if we generate
# it on a non-Windows platform.
m.update(ntpath.normpath(str(slnfile)) + str(name))
# TODO(1.5)
#solution = m.hexdigest().upper()
solution = string.upper(_hexdigest(m.digest()))
# convert most of the signature to GUID form (discard the rest)
solution = "{" + solution[:8] + "-" + solution[8:12] + "-" + solution[12:16] + "-" + solution[16:20] + "-" + solution[20:32] + "}"
return solution
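# Illustrative (hypothetical inputs; the GUID shown is made up, not a real
# digest):
#   _generateGUID('build/myproj.sln', 'myproj')
#   -> '{0123ABCD-4567-89EF-0123-456789ABCDEF}'
# The same (slnfile, name) pair always yields the same GUID, which keeps
# regenerated solution files stable across invocations.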
version_re = re.compile(r'(\d+\.\d+)(.*)')
def msvs_parse_version(s):
"""
Split a Visual Studio version, which may in fact be something like
    '7.0Exp', into its version number (returned as a float) and trailing
"suite" portion.
"""
num, suite = version_re.match(s).groups()
return float(num), suite
# This is how we re-invoke SCons from inside MSVS Project files.
# The problem is that we might have been invoked as either scons.bat
# or scons.py. If we were invoked directly as scons.py, then we could
# use sys.argv[0] to find the SCons "executable," but that doesn't work
# if we were invoked as scons.bat, which uses "python -c" to execute
# things and ends up with "-c" as sys.argv[0]. Consequently, we have
# the MSVS Project file invoke SCons the same way that scons.bat does,
# which works regardless of how we were invoked.
def getExecScriptMain(env, xml=None):
scons_home = env.get('SCONS_HOME')
if not scons_home and os.environ.has_key('SCONS_LIB_DIR'):
scons_home = os.environ['SCONS_LIB_DIR']
if scons_home:
exec_script_main = "from os.path import join; import sys; sys.path = [ r'%s' ] + sys.path; import SCons.Script; SCons.Script.main()" % scons_home
else:
version = SCons.__version__
exec_script_main = "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-%(version)s'), join(sys.prefix, 'scons-%(version)s'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons') ] + sys.path; import SCons.Script; SCons.Script.main()" % locals()
if xml:
exec_script_main = xmlify(exec_script_main)
return exec_script_main
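# A sketch of the resulting invocation when SCONS_HOME is set (the path is
# hypothetical):
#   python -c "from os.path import join; import sys; sys.path = [ r'C:\scons' ] + sys.path; import SCons.Script; SCons.Script.main()"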
# The string for the Python executable we tell the Project file to use
# is either sys.executable or, if an external PYTHON_ROOT environment
# variable exists, $(PYTHON)ROOT\\python.exe (generalized a little to
# pluck the actual executable name from sys.executable).
try:
python_root = os.environ['PYTHON_ROOT']
except KeyError:
python_executable = sys.executable
else:
python_executable = os.path.join('$$(PYTHON_ROOT)',
os.path.split(sys.executable)[1])
class Config:
pass
def splitFully(path):
dir, base = os.path.split(path)
if dir and dir != '' and dir != path:
return splitFully(dir)+[base]
if base == '':
return []
return [base]
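# e.g. splitFully(os.path.join('dir', 'sub', 'file.c')) -> ['dir', 'sub', 'file.c']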
def makeHierarchy(sources):
'''Break a list of files into a hierarchy; for each value, if it is a string,
then it is a file. If it is a dictionary, it is a folder. The string is
the original path of the file.'''
hierarchy = {}
for file in sources:
path = splitFully(file)
if len(path):
dict = hierarchy
for part in path[:-1]:
if not dict.has_key(part):
dict[part] = {}
dict = dict[part]
dict[path[-1]] = file
#else:
# print 'Warning: failed to decompose path for '+str(file)
return hierarchy
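# Illustrative:
#   makeHierarchy(['dir/a.c', 'dir/sub/b.c', 'top.c'])
#   -> {'dir': {'a.c': 'dir/a.c', 'sub': {'b.c': 'dir/sub/b.c'}}, 'top.c': 'top.c'}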
class _DSPGenerator:
""" Base class for DSP generators """
srcargs = [
'srcs',
'incs',
'localincs',
'resources',
'misc']
def __init__(self, dspfile, source, env):
self.dspfile = str(dspfile)
try:
get_abspath = dspfile.get_abspath
except AttributeError:
self.dspabs = os.path.abspath(dspfile)
else:
self.dspabs = get_abspath()
if not env.has_key('variant'):
raise SCons.Errors.InternalError, \
"You must specify a 'variant' argument (i.e. 'Debug' or " +\
"'Release') to create an MSVSProject."
elif SCons.Util.is_String(env['variant']):
variants = [env['variant']]
elif SCons.Util.is_List(env['variant']):
variants = env['variant']
if not env.has_key('buildtarget') or env['buildtarget'] == None:
buildtarget = ['']
elif SCons.Util.is_String(env['buildtarget']):
buildtarget = [env['buildtarget']]
elif SCons.Util.is_List(env['buildtarget']):
if len(env['buildtarget']) != len(variants):
raise SCons.Errors.InternalError, \
"Sizes of 'buildtarget' and 'variant' lists must be the same."
buildtarget = []
for bt in env['buildtarget']:
if SCons.Util.is_String(bt):
buildtarget.append(bt)
else:
buildtarget.append(bt.get_abspath())
else:
buildtarget = [env['buildtarget'].get_abspath()]
if len(buildtarget) == 1:
bt = buildtarget[0]
buildtarget = []
for _ in variants:
buildtarget.append(bt)
if not env.has_key('outdir') or env['outdir'] == None:
outdir = ['']
elif SCons.Util.is_String(env['outdir']):
outdir = [env['outdir']]
elif SCons.Util.is_List(env['outdir']):
if len(env['outdir']) != len(variants):
raise SCons.Errors.InternalError, \
"Sizes of 'outdir' and 'variant' lists must be the same."
outdir = []
for s in env['outdir']:
if SCons.Util.is_String(s):
outdir.append(s)
else:
outdir.append(s.get_abspath())
else:
outdir = [env['outdir'].get_abspath()]
if len(outdir) == 1:
s = outdir[0]
outdir = []
for v in variants:
outdir.append(s)
if not env.has_key('runfile') or env['runfile'] == None:
runfile = buildtarget[-1:]
elif SCons.Util.is_String(env['runfile']):
runfile = [env['runfile']]
elif SCons.Util.is_List(env['runfile']):
if len(env['runfile']) != len(variants):
raise SCons.Errors.InternalError, \
"Sizes of 'runfile' and 'variant' lists must be the same."
runfile = []
for s in env['runfile']:
if SCons.Util.is_String(s):
runfile.append(s)
else:
runfile.append(s.get_abspath())
else:
runfile = [env['runfile'].get_abspath()]
if len(runfile) == 1:
s = runfile[0]
runfile = []
for v in variants:
runfile.append(s)
self.sconscript = env['MSVSSCONSCRIPT']
cmdargs = env.get('cmdargs', '')
self.env = env
if self.env.has_key('name'):
self.name = self.env['name']
else:
self.name = os.path.basename(SCons.Util.splitext(self.dspfile)[0])
self.name = self.env.subst(self.name)
sourcenames = [
'Source Files',
'Header Files',
'Local Headers',
'Resource Files',
'Other Files']
self.sources = {}
for n in sourcenames:
self.sources[n] = []
self.configs = {}
self.nokeep = 0
        if env.has_key('nokeep') and env['nokeep'] != 0:
self.nokeep = 1
if self.nokeep == 0 and os.path.exists(self.dspabs):
self.Parse()
for t in zip(sourcenames,self.srcargs):
if self.env.has_key(t[1]):
if SCons.Util.is_List(self.env[t[1]]):
for i in self.env[t[1]]:
if not i in self.sources[t[0]]:
self.sources[t[0]].append(i)
else:
if not self.env[t[1]] in self.sources[t[0]]:
self.sources[t[0]].append(self.env[t[1]])
for n in sourcenames:
# TODO(1.5):
#self.sources[n].sort(lambda a, b: cmp(a.lower(), b.lower()))
self.sources[n].sort(lambda a, b: cmp(string.lower(a), string.lower(b)))
def AddConfig(self, variant, buildtarget, outdir, runfile, cmdargs, dspfile=dspfile):
config = Config()
config.buildtarget = buildtarget
config.outdir = outdir
config.cmdargs = cmdargs
config.runfile = runfile
match = re.match('(.*)\|(.*)', variant)
if match:
config.variant = match.group(1)
config.platform = match.group(2)
else:
config.variant = variant
config.platform = 'Win32'
self.configs[variant] = config
print "Adding '" + self.name + ' - ' + config.variant + '|' + config.platform + "' to '" + str(dspfile) + "'"
for i in range(len(variants)):
AddConfig(self, variants[i], buildtarget[i], outdir[i], runfile[i], cmdargs)
self.platforms = []
for key in self.configs.keys():
platform = self.configs[key].platform
if not platform in self.platforms:
self.platforms.append(platform)
def Build(self):
pass
V6DSPHeader = """\
# Microsoft Developer Studio Project File - Name="%(name)s" - Package Owner=<4>
# Microsoft Developer Studio Generated Build File, Format Version 6.00
# ** DO NOT EDIT **
# TARGTYPE "Win32 (x86) External Target" 0x0106
CFG=%(name)s - Win32 %(confkey)s
!MESSAGE This is not a valid makefile. To build this project using NMAKE,
!MESSAGE use the Export Makefile command and run
!MESSAGE
!MESSAGE NMAKE /f "%(name)s.mak".
!MESSAGE
!MESSAGE You can specify a configuration when running NMAKE
!MESSAGE by defining the macro CFG on the command line. For example:
!MESSAGE
!MESSAGE NMAKE /f "%(name)s.mak" CFG="%(name)s - Win32 %(confkey)s"
!MESSAGE
!MESSAGE Possible choices for configuration are:
!MESSAGE
"""
class _GenerateV6DSP(_DSPGenerator):
"""Generates a Project file for MSVS 6.0"""
def PrintHeader(self):
# pick a default config
confkeys = self.configs.keys()
confkeys.sort()
name = self.name
confkey = confkeys[0]
self.file.write(V6DSPHeader % locals())
for kind in confkeys:
self.file.write('!MESSAGE "%s - Win32 %s" (based on "Win32 (x86) External Target")\n' % (name, kind))
self.file.write('!MESSAGE \n\n')
def PrintProject(self):
name = self.name
self.file.write('# Begin Project\n'
'# PROP AllowPerConfigDependencies 0\n'
'# PROP Scc_ProjName ""\n'
'# PROP Scc_LocalPath ""\n\n')
first = 1
confkeys = self.configs.keys()
confkeys.sort()
for kind in confkeys:
outdir = self.configs[kind].outdir
buildtarget = self.configs[kind].buildtarget
if first == 1:
self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind))
first = 0
else:
self.file.write('\n!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind))
env_has_buildtarget = self.env.has_key('MSVSBUILDTARGET')
if not env_has_buildtarget:
self.env['MSVSBUILDTARGET'] = buildtarget
# have to write this twice, once with the BASE settings, and once without
for base in ("BASE ",""):
self.file.write('# PROP %sUse_MFC 0\n'
'# PROP %sUse_Debug_Libraries ' % (base, base))
# TODO(1.5):
#if kind.lower().find('debug') < 0:
if string.find(string.lower(kind), 'debug') < 0:
self.file.write('0\n')
else:
self.file.write('1\n')
self.file.write('# PROP %sOutput_Dir "%s"\n'
'# PROP %sIntermediate_Dir "%s"\n' % (base,outdir,base,outdir))
cmd = 'echo Starting SCons && ' + self.env.subst('$MSVSBUILDCOM', 1)
self.file.write('# PROP %sCmd_Line "%s"\n'
'# PROP %sRebuild_Opt "-c && %s"\n'
'# PROP %sTarget_File "%s"\n'
'# PROP %sBsc_Name ""\n'
'# PROP %sTarget_Dir ""\n'\
%(base,cmd,base,cmd,base,buildtarget,base,base))
if not env_has_buildtarget:
del self.env['MSVSBUILDTARGET']
self.file.write('\n!ENDIF\n\n'
'# Begin Target\n\n')
for kind in confkeys:
self.file.write('# Name "%s - Win32 %s"\n' % (name,kind))
self.file.write('\n')
first = 0
for kind in confkeys:
if first == 0:
self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind))
first = 1
else:
self.file.write('!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind))
self.file.write('!ENDIF \n\n')
self.PrintSourceFiles()
self.file.write('# End Target\n'
'# End Project\n')
if self.nokeep == 0:
# now we pickle some data and add it to the file -- MSDEV will ignore it.
pdata = pickle.dumps(self.configs,1)
pdata = base64.encodestring(pdata)
self.file.write(pdata + '\n')
pdata = pickle.dumps(self.sources,1)
pdata = base64.encodestring(pdata)
self.file.write(pdata + '\n')
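            # The two blobs written above are base64-encoded pickles of
            # self.configs and self.sources; Parse() below scans past
            # '# End Project' and round-trips them with base64.decodestring /
            # pickle.loads so that project settings survive regeneration.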
def PrintSourceFiles(self):
categories = {'Source Files': 'cpp|c|cxx|l|y|def|odl|idl|hpj|bat',
'Header Files': 'h|hpp|hxx|hm|inl',
'Local Headers': 'h|hpp|hxx|hm|inl',
'Resource Files': 'r|rc|ico|cur|bmp|dlg|rc2|rct|bin|cnt|rtf|gif|jpg|jpeg|jpe',
'Other Files': ''}
cats = categories.keys()
# TODO(1.5):
#cats.sort(lambda a, b: cmp(a.lower(), b.lower()))
cats.sort(lambda a, b: cmp(string.lower(a), string.lower(b)))
for kind in cats:
if not self.sources[kind]:
continue # skip empty groups
self.file.write('# Begin Group "' + kind + '"\n\n')
# TODO(1.5)
#typelist = categories[kind].replace('|', ';')
typelist = string.replace(categories[kind], '|', ';')
self.file.write('# PROP Default_Filter "' + typelist + '"\n')
for file in self.sources[kind]:
file = os.path.normpath(file)
self.file.write('# Begin Source File\n\n'
'SOURCE="' + file + '"\n'
'# End Source File\n')
self.file.write('# End Group\n')
# add the SConscript file outside of the groups
self.file.write('# Begin Source File\n\n'
'SOURCE="' + str(self.sconscript) + '"\n'
'# End Source File\n')
def Parse(self):
try:
dspfile = open(self.dspabs,'r')
except IOError:
return # doesn't exist yet, so can't add anything to configs.
line = dspfile.readline()
while line:
# TODO(1.5):
#if line.find("# End Project") > -1:
if string.find(line, "# End Project") > -1:
break
line = dspfile.readline()
line = dspfile.readline()
datas = line
while line and line != '\n':
line = dspfile.readline()
datas = datas + line
# OK, we've found our little pickled cache of data.
try:
datas = base64.decodestring(datas)
data = pickle.loads(datas)
except KeyboardInterrupt:
raise
except:
return # unable to unpickle any data for some reason
self.configs.update(data)
data = None
line = dspfile.readline()
datas = line
while line and line != '\n':
line = dspfile.readline()
datas = datas + line
# OK, we've found our little pickled cache of data.
# it has a "# " in front of it, so we strip that.
try:
datas = base64.decodestring(datas)
data = pickle.loads(datas)
except KeyboardInterrupt:
raise
except:
return # unable to unpickle any data for some reason
self.sources.update(data)
def Build(self):
try:
self.file = open(self.dspabs,'w')
except IOError, detail:
raise SCons.Errors.InternalError, 'Unable to open "' + self.dspabs + '" for writing:' + str(detail)
else:
self.PrintHeader()
self.PrintProject()
self.file.close()
V7DSPHeader = """\
<?xml version="1.0" encoding = "%(encoding)s"?>
<VisualStudioProject
\tProjectType="Visual C++"
\tVersion="%(versionstr)s"
\tName="%(name)s"
%(scc_attrs)s
\tKeyword="MakeFileProj">
"""
V7DSPConfiguration = """\
\t\t<Configuration
\t\t\tName="%(variant)s|%(platform)s"
\t\t\tOutputDirectory="%(outdir)s"
\t\t\tIntermediateDirectory="%(outdir)s"
\t\t\tConfigurationType="0"
\t\t\tUseOfMFC="0"
\t\t\tATLMinimizesCRunTimeLibraryUsage="FALSE">
\t\t\t<Tool
\t\t\t\tName="VCNMakeTool"
\t\t\t\tBuildCommandLine="%(buildcmd)s"
\t\t\t\tCleanCommandLine="%(cleancmd)s"
\t\t\t\tRebuildCommandLine="%(rebuildcmd)s"
\t\t\t\tOutput="%(runfile)s"/>
\t\t</Configuration>
"""
V8DSPHeader = """\
<?xml version="1.0" encoding="%(encoding)s"?>
<VisualStudioProject
\tProjectType="Visual C++"
\tVersion="%(versionstr)s"
\tName="%(name)s"
%(scc_attrs)s
\tRootNamespace="%(name)s"
\tKeyword="MakeFileProj">
"""
V8DSPConfiguration = """\
\t\t<Configuration
\t\t\tName="%(variant)s|%(platform)s"
\t\t\tConfigurationType="0"
\t\t\tUseOfMFC="0"
\t\t\tATLMinimizesCRunTimeLibraryUsage="false"
\t\t\t>
\t\t\t<Tool
\t\t\t\tName="VCNMakeTool"
\t\t\t\tBuildCommandLine="%(buildcmd)s"
\t\t\t\tReBuildCommandLine="%(rebuildcmd)s"
\t\t\t\tCleanCommandLine="%(cleancmd)s"
\t\t\t\tOutput="%(runfile)s"
\t\t\t\tPreprocessorDefinitions="%(preprocdefs)s"
\t\t\t\tIncludeSearchPath="%(includepath)s"
\t\t\t\tForcedIncludes=""
\t\t\t\tAssemblySearchPath=""
\t\t\t\tForcedUsingAssemblies=""
\t\t\t\tCompileAsManaged=""
\t\t\t/>
\t\t</Configuration>
"""
class _GenerateV7DSP(_DSPGenerator):
"""Generates a Project file for MSVS .NET"""
def __init__(self, dspfile, source, env):
_DSPGenerator.__init__(self, dspfile, source, env)
self.version = env['MSVS_VERSION']
self.version_num, self.suite = msvs_parse_version(self.version)
if self.version_num >= 8.0:
self.versionstr = '8.00'
self.dspheader = V8DSPHeader
self.dspconfiguration = V8DSPConfiguration
else:
if self.version_num >= 7.1:
self.versionstr = '7.10'
else:
self.versionstr = '7.00'
self.dspheader = V7DSPHeader
self.dspconfiguration = V7DSPConfiguration
self.file = None
def PrintHeader(self):
env = self.env
versionstr = self.versionstr
name = self.name
encoding = self.env.subst('$MSVSENCODING')
scc_provider = env.get('MSVS_SCC_PROVIDER', '')
scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '')
scc_aux_path = env.get('MSVS_SCC_AUX_PATH', '')
scc_local_path = env.get('MSVS_SCC_LOCAL_PATH', '')
project_guid = env.get('MSVS_PROJECT_GUID', '')
if self.version_num >= 8.0 and not project_guid:
project_guid = _generateGUID(self.dspfile, '')
if scc_provider != '':
scc_attrs = ('\tProjectGUID="%s"\n'
'\tSccProjectName="%s"\n'
'\tSccAuxPath="%s"\n'
'\tSccLocalPath="%s"\n'
'\tSccProvider="%s"' % (project_guid, scc_project_name, scc_aux_path, scc_local_path, scc_provider))
else:
scc_attrs = ('\tProjectGUID="%s"\n'
'\tSccProjectName="%s"\n'
'\tSccLocalPath="%s"' % (project_guid, scc_project_name, scc_local_path))
self.file.write(self.dspheader % locals())
self.file.write('\t<Platforms>\n')
for platform in self.platforms:
self.file.write(
'\t\t<Platform\n'
'\t\t\tName="%s"/>\n' % platform)
self.file.write('\t</Platforms>\n')
if self.version_num >= 8.0:
self.file.write('\t<ToolFiles>\n'
'\t</ToolFiles>\n')
def PrintProject(self):
self.file.write('\t<Configurations>\n')
confkeys = self.configs.keys()
confkeys.sort()
for kind in confkeys:
variant = self.configs[kind].variant
platform = self.configs[kind].platform
outdir = self.configs[kind].outdir
buildtarget = self.configs[kind].buildtarget
runfile = self.configs[kind].runfile
cmdargs = self.configs[kind].cmdargs
env_has_buildtarget = self.env.has_key('MSVSBUILDTARGET')
if not env_has_buildtarget:
self.env['MSVSBUILDTARGET'] = buildtarget
starting = 'echo Starting SCons && '
if cmdargs:
cmdargs = ' ' + cmdargs
else:
cmdargs = ''
buildcmd = xmlify(starting + self.env.subst('$MSVSBUILDCOM', 1) + cmdargs)
rebuildcmd = xmlify(starting + self.env.subst('$MSVSREBUILDCOM', 1) + cmdargs)
cleancmd = xmlify(starting + self.env.subst('$MSVSCLEANCOM', 1) + cmdargs)
# TODO(1.5)
#preprocdefs = xmlify(';'.join(self.env.get('CPPDEFINES', [])))
#includepath = xmlify(';'.join(self.env.get('CPPPATH', [])))
preprocdefs = xmlify(string.join(self.env.get('CPPDEFINES', []), ';'))
includepath = xmlify(string.join(self.env.get('CPPPATH', []), ';'))
if not env_has_buildtarget:
del self.env['MSVSBUILDTARGET']
self.file.write(self.dspconfiguration % locals())
self.file.write('\t</Configurations>\n')
if self.version_num >= 7.1:
self.file.write('\t<References>\n'
'\t</References>\n')
self.PrintSourceFiles()
self.file.write('</VisualStudioProject>\n')
if self.nokeep == 0:
# now we pickle some data and add it to the file -- MSDEV will ignore it.
pdata = pickle.dumps(self.configs,1)
pdata = base64.encodestring(pdata)
self.file.write('<!-- SCons Data:\n' + pdata + '\n')
pdata = pickle.dumps(self.sources,1)
pdata = base64.encodestring(pdata)
self.file.write(pdata + '-->\n')
def printSources(self, hierarchy, commonprefix):
sorteditems = hierarchy.items()
# TODO(1.5):
#sorteditems.sort(lambda a, b: cmp(a[0].lower(), b[0].lower()))
sorteditems.sort(lambda a, b: cmp(string.lower(a[0]), string.lower(b[0])))
# First folders, then files
for key, value in sorteditems:
if SCons.Util.is_Dict(value):
self.file.write('\t\t\t<Filter\n'
'\t\t\t\tName="%s"\n'
'\t\t\t\tFilter="">\n' % (key))
self.printSources(value, commonprefix)
self.file.write('\t\t\t</Filter>\n')
for key, value in sorteditems:
if SCons.Util.is_String(value):
file = value
if commonprefix:
file = os.path.join(commonprefix, value)
file = os.path.normpath(file)
self.file.write('\t\t\t<File\n'
'\t\t\t\tRelativePath="%s">\n'
'\t\t\t</File>\n' % (file))
def PrintSourceFiles(self):
categories = {'Source Files': 'cpp;c;cxx;l;y;def;odl;idl;hpj;bat',
'Header Files': 'h;hpp;hxx;hm;inl',
'Local Headers': 'h;hpp;hxx;hm;inl',
'Resource Files': 'r;rc;ico;cur;bmp;dlg;rc2;rct;bin;cnt;rtf;gif;jpg;jpeg;jpe',
'Other Files': ''}
self.file.write('\t<Files>\n')
cats = categories.keys()
# TODO(1.5)
#cats.sort(lambda a, b: cmp(a.lower(), b.lower()))
cats.sort(lambda a, b: cmp(string.lower(a), string.lower(b)))
cats = filter(lambda k, s=self: s.sources[k], cats)
for kind in cats:
if len(cats) > 1:
self.file.write('\t\t<Filter\n'
'\t\t\tName="%s"\n'
'\t\t\tFilter="%s">\n' % (kind, categories[kind]))
sources = self.sources[kind]
# First remove any common prefix
commonprefix = None
if len(sources) > 1:
s = map(os.path.normpath, sources)
# take the dirname because the prefix may include parts
# of the filenames (e.g. if you have 'dir\abcd' and
# 'dir\acde' then the cp will be 'dir\a' )
cp = os.path.dirname( os.path.commonprefix(s) )
if cp and s[0][len(cp)] == os.sep:
# +1 because the filename starts after the separator
sources = map(lambda s, l=len(cp)+1: s[l:], sources)
commonprefix = cp
elif len(sources) == 1:
commonprefix = os.path.dirname( sources[0] )
sources[0] = os.path.basename( sources[0] )
hierarchy = makeHierarchy(sources)
self.printSources(hierarchy, commonprefix=commonprefix)
if len(cats)>1:
self.file.write('\t\t</Filter>\n')
# add the SConscript file outside of the groups
self.file.write('\t\t<File\n'
'\t\t\tRelativePath="%s">\n'
'\t\t</File>\n' % str(self.sconscript))
self.file.write('\t</Files>\n'
'\t<Globals>\n'
'\t</Globals>\n')
def Parse(self):
try:
dspfile = open(self.dspabs,'r')
except IOError:
return # doesn't exist yet, so can't add anything to configs.
line = dspfile.readline()
while line:
# TODO(1.5)
#if line.find('<!-- SCons Data:') > -1:
if string.find(line, '<!-- SCons Data:') > -1:
break
line = dspfile.readline()
line = dspfile.readline()
datas = line
while line and line != '\n':
line = dspfile.readline()
datas = datas + line
# OK, we've found our little pickled cache of data.
try:
datas = base64.decodestring(datas)
data = pickle.loads(datas)
except KeyboardInterrupt:
raise
except:
return # unable to unpickle any data for some reason
self.configs.update(data)
data = None
line = dspfile.readline()
datas = line
while line and line != '\n':
line = dspfile.readline()
datas = datas + line
# OK, we've found our little pickled cache of data.
try:
datas = base64.decodestring(datas)
data = pickle.loads(datas)
except KeyboardInterrupt:
raise
except:
return # unable to unpickle any data for some reason
self.sources.update(data)
def Build(self):
try:
self.file = open(self.dspabs,'w')
except IOError, detail:
raise SCons.Errors.InternalError, 'Unable to open "' + self.dspabs + '" for writing:' + str(detail)
else:
self.PrintHeader()
self.PrintProject()
self.file.close()
class _DSWGenerator:
""" Base class for DSW generators """
def __init__(self, dswfile, source, env):
self.dswfile = os.path.normpath(str(dswfile))
self.env = env
if not env.has_key('projects'):
raise SCons.Errors.UserError, \
"You must specify a 'projects' argument to create an MSVSSolution."
projects = env['projects']
if not SCons.Util.is_List(projects):
raise SCons.Errors.InternalError, \
"The 'projects' argument must be a list of nodes."
projects = SCons.Util.flatten(projects)
if len(projects) < 1:
raise SCons.Errors.UserError, \
"You must specify at least one project to create an MSVSSolution."
self.dspfiles = map(str, projects)
if self.env.has_key('name'):
self.name = self.env['name']
else:
self.name = os.path.basename(SCons.Util.splitext(self.dswfile)[0])
self.name = self.env.subst(self.name)
def Build(self):
pass
class _GenerateV7DSW(_DSWGenerator):
"""Generates a Solution file for MSVS .NET"""
def __init__(self, dswfile, source, env):
_DSWGenerator.__init__(self, dswfile, source, env)
self.file = None
self.version = self.env['MSVS_VERSION']
self.version_num, self.suite = msvs_parse_version(self.version)
self.versionstr = '7.00'
if self.version_num >= 8.0:
self.versionstr = '9.00'
elif self.version_num >= 7.1:
self.versionstr = '8.00'
if env.has_key('slnguid') and env['slnguid']:
self.slnguid = env['slnguid']
else:
self.slnguid = _generateGUID(dswfile, self.name)
self.configs = {}
self.nokeep = 0
        if env.has_key('nokeep') and env['nokeep'] != 0:
self.nokeep = 1
if self.nokeep == 0 and os.path.exists(self.dswfile):
self.Parse()
def AddConfig(self, variant, dswfile=dswfile):
config = Config()
match = re.match('(.*)\|(.*)', variant)
if match:
config.variant = match.group(1)
config.platform = match.group(2)
else:
config.variant = variant
config.platform = 'Win32'
self.configs[variant] = config
print "Adding '" + self.name + ' - ' + config.variant + '|' + config.platform + "' to '" + str(dswfile) + "'"
if not env.has_key('variant'):
raise SCons.Errors.InternalError, \
"You must specify a 'variant' argument (i.e. 'Debug' or " +\
"'Release') to create an MSVS Solution File."
elif SCons.Util.is_String(env['variant']):
AddConfig(self, env['variant'])
elif SCons.Util.is_List(env['variant']):
for variant in env['variant']:
AddConfig(self, variant)
self.platforms = []
for key in self.configs.keys():
platform = self.configs[key].platform
if not platform in self.platforms:
self.platforms.append(platform)
def Parse(self):
try:
dswfile = open(self.dswfile,'r')
except IOError:
return # doesn't exist yet, so can't add anything to configs.
line = dswfile.readline()
while line:
if line[:9] == "EndGlobal":
break
line = dswfile.readline()
line = dswfile.readline()
datas = line
while line:
line = dswfile.readline()
datas = datas + line
# OK, we've found our little pickled cache of data.
try:
datas = base64.decodestring(datas)
data = pickle.loads(datas)
except KeyboardInterrupt:
raise
except:
return # unable to unpickle any data for some reason
self.configs.update(data)
def PrintSolution(self):
"""Writes a solution file"""
self.file.write('Microsoft Visual Studio Solution File, Format Version %s\n' % self.versionstr )
if self.version_num >= 8.0:
self.file.write('# Visual Studio 2005\n')
for p in self.dspfiles:
name = os.path.basename(p)
base, suffix = SCons.Util.splitext(name)
if suffix == '.vcproj':
name = base
guid = _generateGUID(p, '')
self.file.write('Project("%s") = "%s", "%s", "%s"\n'
% ( external_makefile_guid, name, p, guid ) )
if self.version_num >= 7.1 and self.version_num < 8.0:
self.file.write('\tProjectSection(ProjectDependencies) = postProject\n'
'\tEndProjectSection\n')
self.file.write('EndProject\n')
self.file.write('Global\n')
env = self.env
if env.has_key('MSVS_SCC_PROVIDER'):
dspfile_base = os.path.basename(self.dspfile)
slnguid = self.slnguid
scc_provider = env.get('MSVS_SCC_PROVIDER', '')
scc_provider = string.replace(scc_provider, ' ', r'\u0020')
scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '')
# scc_aux_path = env.get('MSVS_SCC_AUX_PATH', '')
scc_local_path = env.get('MSVS_SCC_LOCAL_PATH', '')
scc_project_base_path = env.get('MSVS_SCC_PROJECT_BASE_PATH', '')
# project_guid = env.get('MSVS_PROJECT_GUID', '')
self.file.write('\tGlobalSection(SourceCodeControl) = preSolution\n'
'\t\tSccNumberOfProjects = 2\n'
'\t\tSccProjectUniqueName0 = %(dspfile_base)s\n'
'\t\tSccLocalPath0 = %(scc_local_path)s\n'
'\t\tCanCheckoutShared = true\n'
'\t\tSccProjectFilePathRelativizedFromConnection0 = %(scc_project_base_path)s\n'
'\t\tSccProjectName1 = %(scc_project_name)s\n'
'\t\tSccLocalPath1 = %(scc_local_path)s\n'
'\t\tSccProvider1 = %(scc_provider)s\n'
'\t\tCanCheckoutShared = true\n'
'\t\tSccProjectFilePathRelativizedFromConnection1 = %(scc_project_base_path)s\n'
'\t\tSolutionUniqueID = %(slnguid)s\n'
'\tEndGlobalSection\n' % locals())
if self.version_num >= 8.0:
self.file.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
else:
self.file.write('\tGlobalSection(SolutionConfiguration) = preSolution\n')
confkeys = self.configs.keys()
confkeys.sort()
cnt = 0
for name in confkeys:
variant = self.configs[name].variant
platform = self.configs[name].platform
if self.version_num >= 8.0:
self.file.write('\t\t%s|%s = %s|%s\n' % (variant, platform, variant, platform))
else:
self.file.write('\t\tConfigName.%d = %s\n' % (cnt, variant))
cnt = cnt + 1
self.file.write('\tEndGlobalSection\n')
if self.version_num < 7.1:
self.file.write('\tGlobalSection(ProjectDependencies) = postSolution\n'
'\tEndGlobalSection\n')
if self.version_num >= 8.0:
self.file.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
else:
self.file.write('\tGlobalSection(ProjectConfiguration) = postSolution\n')
for name in confkeys:
variant = self.configs[name].variant
platform = self.configs[name].platform
if self.version_num >= 8.0:
for p in self.dspfiles:
guid = _generateGUID(p, '')
self.file.write('\t\t%s.%s|%s.ActiveCfg = %s|%s\n'
'\t\t%s.%s|%s.Build.0 = %s|%s\n' % (guid,variant,platform,variant,platform,guid,variant,platform,variant,platform))
else:
for p in self.dspfiles:
guid = _generateGUID(p, '')
self.file.write('\t\t%s.%s.ActiveCfg = %s|%s\n'
'\t\t%s.%s.Build.0 = %s|%s\n' %(guid,variant,variant,platform,guid,variant,variant,platform))
self.file.write('\tEndGlobalSection\n')
if self.version_num >= 8.0:
self.file.write('\tGlobalSection(SolutionProperties) = preSolution\n'
'\t\tHideSolutionNode = FALSE\n'
'\tEndGlobalSection\n')
else:
self.file.write('\tGlobalSection(ExtensibilityGlobals) = postSolution\n'
'\tEndGlobalSection\n'
'\tGlobalSection(ExtensibilityAddIns) = postSolution\n'
'\tEndGlobalSection\n')
self.file.write('EndGlobal\n')
if self.nokeep == 0:
pdata = pickle.dumps(self.configs,1)
pdata = base64.encodestring(pdata)
self.file.write(pdata + '\n')
def Build(self):
try:
self.file = open(self.dswfile,'w')
except IOError, detail:
raise SCons.Errors.InternalError, 'Unable to open "' + self.dswfile + '" for writing:' + str(detail)
else:
self.PrintSolution()
self.file.close()
V6DSWHeader = """\
Microsoft Developer Studio Workspace File, Format Version 6.00
# WARNING: DO NOT EDIT OR DELETE THIS WORKSPACE FILE!
###############################################################################
Project: "%(name)s"="%(dspfile)s" - Package Owner=<4>
Package=<5>
{{{
}}}
Package=<4>
{{{
}}}
###############################################################################
Global:
Package=<5>
{{{
}}}
Package=<3>
{{{
}}}
###############################################################################
"""
class _GenerateV6DSW(_DSWGenerator):
"""Generates a Workspace file for MSVS 6.0"""
def PrintWorkspace(self):
""" writes a DSW file """
name = self.name
dspfile = self.dspfiles[0]
self.file.write(V6DSWHeader % locals())
def Build(self):
try:
self.file = open(self.dswfile,'w')
except IOError, detail:
raise SCons.Errors.InternalError, 'Unable to open "' + self.dswfile + '" for writing:' + str(detail)
else:
self.PrintWorkspace()
self.file.close()
def GenerateDSP(dspfile, source, env):
"""Generates a Project file based on the version of MSVS that is being used"""
version_num = 6.0
if env.has_key('MSVS_VERSION'):
version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
if version_num >= 7.0:
g = _GenerateV7DSP(dspfile, source, env)
g.Build()
else:
g = _GenerateV6DSP(dspfile, source, env)
g.Build()
def GenerateDSW(dswfile, source, env):
"""Generates a Solution/Workspace file based on the version of MSVS that is being used"""
version_num = 6.0
if env.has_key('MSVS_VERSION'):
version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
if version_num >= 7.0:
g = _GenerateV7DSW(dswfile, source, env)
g.Build()
else:
g = _GenerateV6DSW(dswfile, source, env)
g.Build()
##############################################################################
# Above here are the classes and functions for generation of
# DSP/DSW/SLN/VCPROJ files.
##############################################################################
def GetMSVSProjectSuffix(target, source, env, for_signature):
return env['MSVS']['PROJECTSUFFIX']
def GetMSVSSolutionSuffix(target, source, env, for_signature):
return env['MSVS']['SOLUTIONSUFFIX']
def GenerateProject(target, source, env):
# generate the dsp file, according to the version of MSVS.
builddspfile = target[0]
dspfile = builddspfile.srcnode()
# this detects whether or not we're using a VariantDir
if not dspfile is builddspfile:
try:
bdsp = open(str(builddspfile), "w+")
except IOError, detail:
print 'Unable to open "' + str(dspfile) + '" for writing:',detail,'\n'
raise
bdsp.write("This is just a placeholder file.\nThe real project file is here:\n%s\n" % dspfile.get_abspath())
GenerateDSP(dspfile, source, env)
if env.get('auto_build_solution', 1):
builddswfile = target[1]
dswfile = builddswfile.srcnode()
if not dswfile is builddswfile:
try:
bdsw = open(str(builddswfile), "w+")
except IOError, detail:
                print 'Unable to open "' + str(dswfile) + '" for writing:',detail,'\n'
raise
bdsw.write("This is just a placeholder file.\nThe real workspace file is here:\n%s\n" % dswfile.get_abspath())
GenerateDSW(dswfile, source, env)
def GenerateSolution(target, source, env):
GenerateDSW(target[0], source, env)
def projectEmitter(target, source, env):
"""Sets up the DSP dependencies."""
# todo: Not sure what sets source to what user has passed as target,
# but this is what happens. When that is fixed, we also won't have
# to make the user always append env['MSVSPROJECTSUFFIX'] to target.
if source[0] == target[0]:
source = []
# make sure the suffix is correct for the version of MSVS we're running.
(base, suff) = SCons.Util.splitext(str(target[0]))
suff = env.subst('$MSVSPROJECTSUFFIX')
target[0] = base + suff
if not source:
source = 'prj_inputs:'
source = source + env.subst('$MSVSSCONSCOM', 1)
source = source + env.subst('$MSVSENCODING', 1)
if env.has_key('buildtarget') and env['buildtarget'] != None:
if SCons.Util.is_String(env['buildtarget']):
source = source + ' "%s"' % env['buildtarget']
elif SCons.Util.is_List(env['buildtarget']):
for bt in env['buildtarget']:
if SCons.Util.is_String(bt):
source = source + ' "%s"' % bt
else:
try: source = source + ' "%s"' % bt.get_abspath()
except AttributeError: raise SCons.Errors.InternalError, \
"buildtarget can be a string, a node, a list of strings or nodes, or None"
else:
try: source = source + ' "%s"' % env['buildtarget'].get_abspath()
except AttributeError: raise SCons.Errors.InternalError, \
"buildtarget can be a string, a node, a list of strings or nodes, or None"
if env.has_key('outdir') and env['outdir'] != None:
if SCons.Util.is_String(env['outdir']):
source = source + ' "%s"' % env['outdir']
elif SCons.Util.is_List(env['outdir']):
for s in env['outdir']:
if SCons.Util.is_String(s):
source = source + ' "%s"' % s
else:
try: source = source + ' "%s"' % s.get_abspath()
except AttributeError: raise SCons.Errors.InternalError, \
"outdir can be a string, a node, a list of strings or nodes, or None"
else:
try: source = source + ' "%s"' % env['outdir'].get_abspath()
except AttributeError: raise SCons.Errors.InternalError, \
"outdir can be a string, a node, a list of strings or nodes, or None"
if env.has_key('name'):
if SCons.Util.is_String(env['name']):
source = source + ' "%s"' % env['name']
else:
raise SCons.Errors.InternalError, "name must be a string"
if env.has_key('variant'):
if SCons.Util.is_String(env['variant']):
source = source + ' "%s"' % env['variant']
elif SCons.Util.is_List(env['variant']):
for variant in env['variant']:
if SCons.Util.is_String(variant):
source = source + ' "%s"' % variant
else:
raise SCons.Errors.InternalError, "name must be a string or a list of strings"
else:
raise SCons.Errors.InternalError, "variant must be a string or a list of strings"
else:
raise SCons.Errors.InternalError, "variant must be specified"
for s in _DSPGenerator.srcargs:
if env.has_key(s):
if SCons.Util.is_String(env[s]):
source = source + ' "%s' % env[s]
elif SCons.Util.is_List(env[s]):
for t in env[s]:
if SCons.Util.is_String(t):
source = source + ' "%s"' % t
else:
raise SCons.Errors.InternalError, s + " must be a string or a list of strings"
else:
raise SCons.Errors.InternalError, s + " must be a string or a list of strings"
source = source + ' "%s"' % str(target[0])
source = [SCons.Node.Python.Value(source)]
targetlist = [target[0]]
sourcelist = source
if env.get('auto_build_solution', 1):
env['projects'] = targetlist
t, s = solutionEmitter(target, target, env)
targetlist = targetlist + t
return (targetlist, sourcelist)
def solutionEmitter(target, source, env):
"""Sets up the DSW dependencies."""
# todo: Not sure what sets source to what user has passed as target,
# but this is what happens. When that is fixed, we also won't have
# to make the user always append env['MSVSSOLUTIONSUFFIX'] to target.
if source[0] == target[0]:
source = []
# make sure the suffix is correct for the version of MSVS we're running.
(base, suff) = SCons.Util.splitext(str(target[0]))
suff = env.subst('$MSVSSOLUTIONSUFFIX')
target[0] = base + suff
if not source:
source = 'sln_inputs:'
if env.has_key('name'):
if SCons.Util.is_String(env['name']):
source = source + ' "%s"' % env['name']
else:
raise SCons.Errors.InternalError, "name must be a string"
if env.has_key('variant'):
if SCons.Util.is_String(env['variant']):
source = source + ' "%s"' % env['variant']
elif SCons.Util.is_List(env['variant']):
for variant in env['variant']:
if SCons.Util.is_String(variant):
source = source + ' "%s"' % variant
else:
raise SCons.Errors.InternalError, "name must be a string or a list of strings"
else:
raise SCons.Errors.InternalError, "variant must be a string or a list of strings"
else:
raise SCons.Errors.InternalError, "variant must be specified"
if env.has_key('slnguid'):
if SCons.Util.is_String(env['slnguid']):
source = source + ' "%s"' % env['slnguid']
else:
raise SCons.Errors.InternalError, "slnguid must be a string"
if env.has_key('projects'):
if SCons.Util.is_String(env['projects']):
source = source + ' "%s"' % env['projects']
elif SCons.Util.is_List(env['projects']):
for t in env['projects']:
if SCons.Util.is_String(t):
source = source + ' "%s"' % t
source = source + ' "%s"' % str(target[0])
source = [SCons.Node.Python.Value(source)]
return ([target[0]], source)
projectAction = SCons.Action.Action(GenerateProject, None)
solutionAction = SCons.Action.Action(GenerateSolution, None)
projectBuilder = SCons.Builder.Builder(action = '$MSVSPROJECTCOM',
suffix = '$MSVSPROJECTSUFFIX',
emitter = projectEmitter)
solutionBuilder = SCons.Builder.Builder(action = '$MSVSSOLUTIONCOM',
suffix = '$MSVSSOLUTIONSUFFIX',
emitter = solutionEmitter)
default_MSVS_SConscript = None
def generate(env):
"""Add Builders and construction variables for Microsoft Visual
Studio project files to an Environment."""
try:
env['BUILDERS']['MSVSProject']
except KeyError:
env['BUILDERS']['MSVSProject'] = projectBuilder
try:
env['BUILDERS']['MSVSSolution']
except KeyError:
env['BUILDERS']['MSVSSolution'] = solutionBuilder
env['MSVSPROJECTCOM'] = projectAction
env['MSVSSOLUTIONCOM'] = solutionAction
if SCons.Script.call_stack:
# XXX Need to find a way to abstract this; the build engine
# shouldn't depend on anything in SCons.Script.
env['MSVSSCONSCRIPT'] = SCons.Script.call_stack[0].sconscript
else:
global default_MSVS_SConscript
if default_MSVS_SConscript is None:
default_MSVS_SConscript = env.File('SConstruct')
env['MSVSSCONSCRIPT'] = default_MSVS_SConscript
env['MSVSSCONS'] = '"%s" -c "%s"' % (python_executable, getExecScriptMain(env))
env['MSVSSCONSFLAGS'] = '-C "${MSVSSCONSCRIPT.dir.abspath}" -f ${MSVSSCONSCRIPT.name}'
env['MSVSSCONSCOM'] = '$MSVSSCONS $MSVSSCONSFLAGS'
env['MSVSBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
env['MSVSREBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
env['MSVSCLEANCOM'] = '$MSVSSCONSCOM -c "$MSVSBUILDTARGET"'
env['MSVSENCODING'] = 'Windows-1252'
# Set-up ms tools paths for default version
merge_default_version(env)
version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
if (version_num < 7.0):
env['MSVS']['PROJECTSUFFIX'] = '.dsp'
env['MSVS']['SOLUTIONSUFFIX'] = '.dsw'
else:
env['MSVS']['PROJECTSUFFIX'] = '.vcproj'
env['MSVS']['SOLUTIONSUFFIX'] = '.sln'
env['GET_MSVSPROJECTSUFFIX'] = GetMSVSProjectSuffix
env['GET_MSVSSOLUTIONSUFFIX'] = GetMSVSSolutionSuffix
env['MSVSPROJECTSUFFIX'] = '${GET_MSVSPROJECTSUFFIX}'
env['MSVSSOLUTIONSUFFIX'] = '${GET_MSVSSOLUTIONSUFFIX}'
env['SCONS_HOME'] = os.environ.get('SCONS_HOME')
def exists(env):
return detect_msvs()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
NASLab/GroundROS | refs/heads/master | src/unittests/pathGenerator_test.py | 1 | import sys
sys.path.insert(0, '..')
import path_generator as pthgen
import matplotlib.pyplot as plt
import time
import numpy as np
import pylab
pylab.ion()
def plotPath(pthgen):
fig = plt.figure()
pthgen.getPosition()
plt.plot(pthgen.x_array, pthgen.y_array)
plt.xlim(-1.1, 1.1)
plt.ylim(-1.1, 1.1)
plt.xlabel('X')
plt.ylabel('Y')
plt.title('position')
plt.axes().set_aspect(1)
for t in range(1, 100):
ln, = plt.plot(pthgen.x, pthgen.y, 'ro')
plt.draw()
time.sleep(.1)
ln.remove()
pthgen.setVelocity(np.sin(t*np.pi/50)/1.8)
pthgen.getPosition()
plt.close(fig)
pth = pthgen.PathGenerator(path_type='circle', speed=.3)
plotPath(pth)
pth = pthgen.PathGenerator(path_type='infinity', speed=.3)
plotPath(pth)
pth = pthgen.PathGenerator(path_type='broken_line', speed=.3)
plotPath(pth)
pth = pthgen.PathGenerator(path_type='two_lines', speed=.3)
plotPath(pth)
pth = pthgen.PathGenerator(path_type='line', speed=.3)
plotPath(pth) |
illfelder/compute-image-packages | refs/heads/master | packages/python-google-compute-engine/google_compute_engine/metadata_scripts/tests/script_retriever_test.py | 2 | #!/usr/bin/python
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest for script_retriever.py module."""
import subprocess
from google_compute_engine.compat import urlerror
from google_compute_engine.metadata_scripts import script_retriever
from google_compute_engine.test_compat import builtin
from google_compute_engine.test_compat import mock
from google_compute_engine.test_compat import unittest
class ScriptRetrieverTest(unittest.TestCase):
def setUp(self):
self.script_type = 'test'
self.dest_dir = '/tmp'
self.dest = '/tmp/file'
self.mock_logger = mock.Mock()
self.mock_watcher = mock.Mock()
self.retriever = script_retriever.ScriptRetriever(
self.mock_logger, self.script_type)
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.urlrequest.Request')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.urlrequest.urlopen')
def testDownloadAuthUrl(self, mock_urlopen, mock_request, mock_tempfile):
auth_url = 'https://storage.googleapis.com/fake/url'
mock_tempfile.return_value = mock_tempfile
mock_tempfile.name = self.dest
self.retriever.token = 'bar'
mock_open = mock.mock_open()
with mock.patch('%s.open' % builtin, mock_open):
self.assertEqual(
self.retriever._DownloadAuthUrl(auth_url, self.dest_dir), self.dest)
mock_tempfile.assert_called_once_with(dir=self.dest_dir, delete=False)
mock_tempfile.close.assert_called_once_with()
self.mock_logger.info.assert_called_once_with(
mock.ANY, auth_url, self.dest)
mock_request.assert_called_with(auth_url)
mocked_request = mock_request()
mocked_request.add_unredirected_header.assert_called_with(
'Authorization', 'bar')
mock_urlopen.assert_called_with(mocked_request)
urlopen_read = mock_urlopen().read(return_value=b'foo')
self.mock_logger.warning.assert_not_called()
mock_open.assert_called_once_with(self.dest, 'wb')
handle = mock_open()
handle.write.assert_called_once_with(urlopen_read)
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.urlrequest.Request')
@mock.patch('google_compute_engine.metadata_watcher.MetadataWatcher.GetMetadata')
def testDownloadAuthUrlExceptionAndToken(
self, mock_get_metadata, mock_request, mock_tempfile):
auth_url = 'https://storage.googleapis.com/fake/url'
metadata_prefix = 'http://metadata.google.internal/computeMetadata/v1/'
token_url = metadata_prefix + 'instance/service-accounts/default/token'
mock_tempfile.return_value = mock_tempfile
mock_tempfile.name = self.dest
self.retriever.token = None
mock_get_metadata.return_value = {
'token_type': 'foo', 'access_token': 'bar'}
mock_request.return_value = mock_request
mock_request.side_effect = urlerror.URLError('Error.')
self.assertIsNone(self.retriever._DownloadAuthUrl(auth_url, self.dest_dir))
mock_get_metadata.return_value = mock_get_metadata
# GetMetadata includes a prefix, so remove it.
stripped_url = token_url.replace(metadata_prefix, '')
mock_get_metadata.assert_called_once_with(
stripped_url, recursive=False, retry=False)
self.assertEqual(self.retriever.token, 'foo bar')
self.mock_logger.info.assert_called_once_with(
mock.ANY, auth_url, self.dest)
self.assertEqual(self.mock_logger.warning.call_count, 1)
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.ScriptRetriever._DownloadUrl')
@mock.patch('google_compute_engine.metadata_watcher.MetadataWatcher.GetMetadata')
def testDownloadAuthUrlFallback(
self, mock_get_metadata, mock_download_url, mock_tempfile):
auth_url = 'https://storage.googleapis.com/fake/url'
metadata_prefix = 'http://metadata.google.internal/computeMetadata/v1/'
token_url = metadata_prefix + 'instance/service-accounts/default/token'
mock_tempfile.return_value = mock_tempfile
mock_tempfile.name = self.dest
self.retriever.token = None
mock_get_metadata.return_value = None
mock_download_url.return_value = None
self.assertIsNone(self.retriever._DownloadAuthUrl(auth_url, self.dest_dir))
mock_get_metadata.return_value = mock_get_metadata
# GetMetadata includes a prefix, so remove it.
prefix = 'http://metadata.google.internal/computeMetadata/v1/'
stripped_url = token_url.replace(prefix, '')
mock_get_metadata.assert_called_once_with(
stripped_url, recursive=False, retry=False)
mock_download_url.assert_called_once_with(auth_url, self.dest_dir)
self.assertIsNone(self.retriever.token)
expected_calls = [
mock.call(mock.ANY, auth_url, self.dest),
mock.call(mock.ANY),
]
self.assertEqual(self.mock_logger.info.mock_calls, expected_calls)
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.urlretrieve.urlretrieve')
def testDownloadUrl(self, mock_retrieve, mock_tempfile):
url = 'http://www.google.com/fake/url'
mock_tempfile.return_value = mock_tempfile
mock_tempfile.name = self.dest
self.assertEqual(
self.retriever._DownloadUrl(url, self.dest_dir), self.dest)
mock_tempfile.assert_called_once_with(dir=self.dest_dir, delete=False)
mock_tempfile.close.assert_called_once_with()
self.mock_logger.info.assert_called_once_with(mock.ANY, url, self.dest)
mock_retrieve.assert_called_once_with(url, self.dest)
self.mock_logger.warning.assert_not_called()
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.time')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.urlretrieve.urlretrieve')
def testDownloadUrlProcessError(self, mock_retrieve, mock_tempfile, mock_time):
url = 'http://www.google.com/fake/url'
mock_tempfile.return_value = mock_tempfile
mock_tempfile.name = self.dest
mock_success = mock.Mock()
mock_success.getcode.return_value = script_retriever.httpclient.OK
    # Success after 3 timeouts. Since max_retry = 3, the final result is failure.
mock_retrieve.side_effect = [
script_retriever.socket.timeout(),
script_retriever.socket.timeout(),
script_retriever.socket.timeout(),
mock_success,
]
self.assertIsNone(self.retriever._DownloadUrl(url, self.dest_dir))
self.assertEqual(self.mock_logger.warning.call_count, 1)
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.time')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.urlretrieve.urlretrieve')
def testDownloadUrlWithRetry(self, mock_retrieve, mock_tempfile, mock_time):
url = 'http://www.google.com/fake/url'
mock_tempfile.return_value = mock_tempfile
mock_tempfile.name = self.dest
mock_success = mock.Mock()
mock_success.getcode.return_value = script_retriever.httpclient.OK
    # Success after 2 timeouts. Since max_retry = 3, the final result is success.
mock_retrieve.side_effect = [
script_retriever.socket.timeout(),
script_retriever.socket.timeout(),
mock_success,
]
self.assertIsNotNone(self.retriever._DownloadUrl(url, self.dest_dir))
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.urlretrieve.urlretrieve')
def testDownloadUrlException(self, mock_retrieve, mock_tempfile):
url = 'http://www.google.com/fake/url'
mock_tempfile.return_value = mock_tempfile
mock_tempfile.name = self.dest
mock_retrieve.side_effect = Exception('Error.')
self.assertIsNone(self.retriever._DownloadUrl(url, self.dest_dir))
self.assertEqual(self.mock_logger.warning.call_count, 1)
def _CreateUrls(self, bucket, obj, gs_match=True):
"""Creates a URL for each of the supported Google Storage URL formats.
Args:
bucket: string, the Google Storage bucket name.
obj: string, the object name in the bucket.
gs_match: bool, True if the bucket and object names are valid.
Returns:
(list, dict):
list, the URLs to download.
dict, a Google Storage URL mapped to the expected 'gs://' format.
"""
gs_url = 'gs://%s/%s' % (bucket, obj)
gs_urls = {gs_url: gs_url}
url_formats = [
'http://%s.storage.googleapis.com/%s',
'https://%s.storage.googleapis.com/%s',
'http://storage.googleapis.com/%s/%s',
'https://storage.googleapis.com/%s/%s',
'http://commondatastorage.googleapis.com/%s/%s',
'https://commondatastorage.googleapis.com/%s/%s',
]
url_formats = [url % (bucket, obj) for url in url_formats]
if gs_match:
gs_urls.update(dict((url, gs_url) for url in url_formats))
return ([], gs_urls)
else:
return (url_formats, gs_urls)
def testDownloadScript(self):
mock_auth_download = mock.Mock()
self.retriever._DownloadAuthUrl = mock_auth_download
mock_download = mock.Mock()
self.retriever._DownloadUrl = mock_download
download_urls = []
download_gs_urls = {}
component_urls = [
('@#$%^', '\n\n\n\n', False),
('///////', '///////', False),
('Abc', 'xyz', False),
(' abc', 'xyz', False),
('abc', 'xyz?', False),
('abc', 'xyz*', False),
('', 'xyz', False),
('a', 'xyz', False),
('abc', '', False),
('hello', 'world', True),
('hello', 'world!', True),
('hello', 'world !', True),
('hello', 'w o r l d ', True),
('hello', 'w\no\nr\nl\nd ', True),
('123_hello', '1!@#$%^', True),
('123456', 'hello.world', True),
]
for bucket, obj, gs_match in component_urls:
urls, gs_urls = self._CreateUrls(bucket, obj, gs_match=gs_match)
download_urls.extend(urls)
download_gs_urls.update(gs_urls)
# All Google Storage URLs are downloaded with an authentication token.
for url, gs_url in download_gs_urls.items():
mock_download.reset_mock()
mock_auth_download.reset_mock()
self.retriever._DownloadScript(gs_url, self.dest_dir)
new_gs_url = gs_url.replace('gs://', 'https://storage.googleapis.com/')
mock_auth_download.assert_called_once_with(new_gs_url, self.dest_dir)
mock_download.assert_not_called()
for url in download_urls:
mock_download.reset_mock()
self.retriever._DownloadScript(url, self.dest_dir)
mock_download.assert_called_once_with(url, self.dest_dir)
for url, gs_url in download_gs_urls.items():
if url.startswith('gs://'):
continue
mock_auth_download.reset_mock()
mock_auth_download.return_value = None
mock_download.reset_mock()
self.retriever._DownloadScript(url, self.dest_dir)
mock_auth_download.assert_called_once_with(url, self.dest_dir)
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
def testGetAttributeScripts(self, mock_tempfile):
script = 'echo Hello World.\n'
script_dest = '/tmp/script'
script_url = 'gs://fake/url'
script_url_dest = '/tmp/script_url'
attribute_data = {
'%s-script' % self.script_type: '\n%s' % script,
'%s-script-url' % self.script_type: script_url,
}
expected_data = {
'%s-script' % self.script_type: script_dest,
'%s-script-url' % self.script_type: script_url_dest,
}
# Mock saving a script to a file.
mock_dest = mock.Mock()
mock_dest.name = script_dest
mock_tempfile.__enter__.return_value = mock_dest
mock_tempfile.return_value = mock_tempfile
# Mock downloading a script from a URL.
mock_download = mock.Mock()
mock_download.return_value = script_url_dest
self.retriever._DownloadScript = mock_download
self.assertEqual(
self.retriever._GetAttributeScripts(attribute_data, self.dest_dir),
expected_data)
self.assertEqual(self.mock_logger.info.call_count, 2)
mock_dest.write.assert_called_once_with(script)
mock_download.assert_called_once_with(script_url, self.dest_dir)
def testGetAttributeScriptsNone(self):
attribute_data = {}
expected_data = {}
self.assertEqual(
self.retriever._GetAttributeScripts(attribute_data, self.dest_dir),
expected_data)
self.mock_logger.info.assert_not_called()
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
def testGetScripts(self, mock_tempfile):
script_dest = '/tmp/script'
script_url_dest = '/tmp/script_url'
metadata = {
'instance': {
'attributes': {
'%s-script' % self.script_type: 'a',
'%s-script-url' % self.script_type: 'b',
},
},
'project': {
'attributes': {
'%s-script' % self.script_type: 'c',
'%s-script-url' % self.script_type: 'd',
},
},
}
expected_data = {
'%s-script' % self.script_type: script_dest,
'%s-script-url' % self.script_type: script_url_dest,
}
self.mock_watcher.GetMetadata.return_value = metadata
self.retriever.watcher = self.mock_watcher
# Mock saving a script to a file.
mock_dest = mock.Mock()
mock_dest.name = script_dest
mock_tempfile.__enter__.return_value = mock_dest
mock_tempfile.return_value = mock_tempfile
# Mock downloading a script from a URL.
mock_download = mock.Mock()
mock_download.return_value = script_url_dest
self.retriever._DownloadScript = mock_download
self.assertEqual(self.retriever.GetScripts(self.dest_dir), expected_data)
self.assertEqual(self.mock_logger.info.call_count, 2)
self.assertEqual(self.mock_logger.warning.call_count, 0)
mock_dest.write.assert_called_once_with('a')
mock_download.assert_called_once_with('b', self.dest_dir)
def testGetScriptsNone(self):
metadata = {
'instance': {
'attributes': None,
},
'project': {
'attributes': None,
},
}
expected_data = {}
self.mock_watcher.GetMetadata.return_value = metadata
self.retriever.watcher = self.mock_watcher
self.assertEqual(self.retriever.GetScripts(self.dest_dir), expected_data)
self.mock_logger.info.assert_not_called()
def testGetScriptsNoMetadata(self):
metadata = None
expected_data = {}
self.mock_watcher.GetMetadata.return_value = metadata
self.retriever.watcher = self.mock_watcher
self.assertEqual(self.retriever.GetScripts(self.dest_dir), expected_data)
self.mock_logger.info.assert_not_called()
self.assertEqual(self.mock_logger.warning.call_count, 2)
@mock.patch('google_compute_engine.metadata_scripts.script_retriever.tempfile.NamedTemporaryFile')
def testGetScriptsFailed(self, mock_tempfile):
script_dest = '/tmp/script'
script_url_dest = None
metadata = {
'instance': {
'attributes': {
'%s-script' % self.script_type: 'a',
'%s-script-url' % self.script_type: 'b',
},
},
'project': {
'attributes': {
'%s-script' % self.script_type: 'c',
'%s-script-url' % self.script_type: 'd',
},
},
}
expected_data = {
'%s-script' % self.script_type: script_dest,
'%s-script-url' % self.script_type: script_url_dest,
}
self.mock_watcher.GetMetadata.return_value = metadata
self.retriever.watcher = self.mock_watcher
# Mock saving a script to a file.
mock_dest = mock.Mock()
mock_dest.name = script_dest
mock_tempfile.__enter__.return_value = mock_dest
mock_tempfile.return_value = mock_tempfile
# Mock downloading a script from a URL.
mock_download = mock.Mock()
mock_download.return_value = None
self.retriever._DownloadScript = mock_download
self.assertEqual(self.retriever.GetScripts(self.dest_dir), expected_data)
self.assertEqual(self.mock_logger.info.call_count, 2)
self.assertEqual(self.mock_logger.warning.call_count, 1)
if __name__ == '__main__':
unittest.main()
|
mvidalgarcia/indico | refs/heads/master | indico/legacy/services/implementation/base.py | 2 | # This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import g, session
from werkzeug.exceptions import Forbidden
from indico.core.logger import sentry_set_tags
class ServiceBase(object):
"""
    The ServiceBase class is the base class for services.
"""
def __init__(self, params):
self._params = params
def _process_args(self):
pass
def _check_access(self):
pass
def process(self):
"""
Processes the request, analyzing the parameters, and feeding them to the
_getAnswer() method (implemented by derived classes)
"""
g.rh = self
sentry_set_tags({'rh': self.__class__.__name__})
self._process_args()
self._check_access()
return self._getAnswer()
def _getAnswer(self):
"""
To be overloaded. It should contain the code that does the actual
business logic and returns a result (python JSON-serializable object).
If this method is not overloaded, an exception will occur.
If you don't want to return an answer, you should still implement this method with 'pass'.
"""
raise NotImplementedError
class LoggedOnlyService(ServiceBase):
def _check_access(self):
if session.user is None:
raise Forbidden("You are currently not authenticated. Please log in again.")
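# Illustrative sketch (not part of Indico): a concrete service implements
# _getAnswer() and inherits the process() flow from ServiceBase. The class
# name and the 'message' parameter below are hypothetical.
class EchoService(LoggedOnlyService):
    """Example service that echoes a request parameter back to the client."""
    def _process_args(self):
        self._message = self._params.get('message', '')
    def _getAnswer(self):
        # Any JSON-serializable object may be returned.
        return {'echo': self._message}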
|
Softmotions/edx-platform | refs/heads/master | common/djangoapps/heartbeat/views.py | 199 | from xmodule.modulestore.django import modulestore
from dogapi import dog_stats_api
from util.json_request import JsonResponse
from django.db import connection
from django.db.utils import DatabaseError
from xmodule.exceptions import HeartbeatFailure
@dog_stats_api.timed('edxapp.heartbeat')
def heartbeat(request):
"""
    Simple view that a load balancer can check to verify that the app is up. Returns a JSON doc
of service id: status or message. If the status for any service is anything other than True,
it returns HTTP code 503 (Service Unavailable); otherwise, it returns 200.
"""
# This refactoring merely delegates to the default modulestore (which if it's mixed modulestore will
# delegate to all configured modulestores) and a quick test of sql. A later refactoring may allow
    # any service to register itself as participating in the heartbeat. It's important that all
    # implementations do as little as possible but give a sound determination that they are ready.
try:
output = modulestore().heartbeat()
except HeartbeatFailure as fail:
return JsonResponse({fail.service: unicode(fail)}, status=503)
cursor = connection.cursor()
try:
cursor.execute("SELECT CURRENT_DATE")
cursor.fetchone()
output['SQL'] = True
except DatabaseError as fail:
return JsonResponse({'SQL': unicode(fail)}, status=503)
return JsonResponse(output)
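# Illustrative responses (sketch; the service keys depend on the configured
# modulestores and are assumptions here):
#   healthy:  HTTP 200, {"default": true, "SQL": true}
#   SQL down: HTTP 503, {"SQL": "<error message>"}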
|
bodi000/odoo | refs/heads/master | addons/account/wizard/account_report_common_account.py | 371 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_common_account_report(osv.osv_memory):
_name = 'account.common.account.report'
_description = 'Account Common Account Report'
_inherit = "account.common.report"
_columns = {
'display_account': fields.selection([('all','All'), ('movement','With movements'),
('not_zero','With balance is not equal to 0'),
],'Display Accounts', required=True),
}
_defaults = {
'display_account': 'movement',
}
def pre_print_report(self, cr, uid, ids, data, context=None):
if context is None:
context = {}
data['form'].update(self.read(cr, uid, ids, ['display_account'], context=context)[0])
return data
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
atheed/servo | refs/heads/master | tests/wpt/css-tests/tools/pytest/_pytest/runner.py | 173 | """ basic collect and runtest protocol implementations """
import bdb
import sys
from time import time
import py
import pytest
from _pytest._code.code import TerminalRepr, ExceptionInfo
def pytest_namespace():
return {
'fail' : fail,
'skip' : skip,
'importorskip' : importorskip,
'exit' : exit,
}
#
# pytest plugin hooks
def pytest_addoption(parser):
group = parser.getgroup("terminal reporting", "reporting", after="general")
group.addoption('--durations',
action="store", type=int, default=None, metavar="N",
help="show N slowest setup/test durations (N=0 for all)."),
def pytest_terminal_summary(terminalreporter):
durations = terminalreporter.config.option.durations
if durations is None:
return
tr = terminalreporter
dlist = []
for replist in tr.stats.values():
for rep in replist:
if hasattr(rep, 'duration'):
dlist.append(rep)
if not dlist:
return
dlist.sort(key=lambda x: x.duration)
dlist.reverse()
if not durations:
tr.write_sep("=", "slowest test durations")
else:
tr.write_sep("=", "slowest %s test durations" % durations)
dlist = dlist[:durations]
for rep in dlist:
nodeid = rep.nodeid.replace("::()::", "::")
tr.write_line("%02.2fs %-8s %s" %
(rep.duration, rep.when, nodeid))
def pytest_sessionstart(session):
session._setupstate = SetupState()
def pytest_sessionfinish(session):
session._setupstate.teardown_all()
class NodeInfo:
def __init__(self, location):
self.location = location
def pytest_runtest_protocol(item, nextitem):
item.ihook.pytest_runtest_logstart(
nodeid=item.nodeid, location=item.location,
)
runtestprotocol(item, nextitem=nextitem)
return True
def runtestprotocol(item, log=True, nextitem=None):
hasrequest = hasattr(item, "_request")
if hasrequest and not item._request:
item._initrequest()
rep = call_and_report(item, "setup", log)
reports = [rep]
if rep.passed:
reports.append(call_and_report(item, "call", log))
reports.append(call_and_report(item, "teardown", log,
nextitem=nextitem))
# after all teardown hooks have been called
# want funcargs and request info to go away
if hasrequest:
item._request = False
item.funcargs = None
return reports
def pytest_runtest_setup(item):
item.session._setupstate.prepare(item)
def pytest_runtest_call(item):
try:
item.runtest()
except Exception:
# Store trace info to allow postmortem debugging
type, value, tb = sys.exc_info()
tb = tb.tb_next # Skip *this* frame
sys.last_type = type
sys.last_value = value
sys.last_traceback = tb
del tb # Get rid of it in this namespace
raise
def pytest_runtest_teardown(item, nextitem):
item.session._setupstate.teardown_exact(item, nextitem)
def pytest_report_teststatus(report):
if report.when in ("setup", "teardown"):
if report.failed:
# category, shortletter, verbose-word
return "error", "E", "ERROR"
elif report.skipped:
return "skipped", "s", "SKIPPED"
else:
return "", "", ""
#
# Implementation
def call_and_report(item, when, log=True, **kwds):
call = call_runtest_hook(item, when, **kwds)
hook = item.ihook
report = hook.pytest_runtest_makereport(item=item, call=call)
if log:
hook.pytest_runtest_logreport(report=report)
if check_interactive_exception(call, report):
hook.pytest_exception_interact(node=item, call=call, report=report)
return report
def check_interactive_exception(call, report):
return call.excinfo and not (
hasattr(report, "wasxfail") or
call.excinfo.errisinstance(skip.Exception) or
call.excinfo.errisinstance(bdb.BdbQuit))
def call_runtest_hook(item, when, **kwds):
hookname = "pytest_runtest_" + when
ihook = getattr(item.ihook, hookname)
return CallInfo(lambda: ihook(item=item, **kwds), when=when)
class CallInfo:
""" Result/Exception info a function invocation. """
#: None or ExceptionInfo object.
excinfo = None
def __init__(self, func, when):
#: context of invocation: one of "setup", "call",
#: "teardown", "memocollect"
self.when = when
self.start = time()
try:
self.result = func()
except KeyboardInterrupt:
self.stop = time()
raise
except:
self.excinfo = ExceptionInfo()
self.stop = time()
def __repr__(self):
if self.excinfo:
status = "exception: %s" % str(self.excinfo.value)
else:
status = "result: %r" % (self.result,)
return "<CallInfo when=%r %s>" % (self.when, status)
def getslaveinfoline(node):
try:
return node._slaveinfocache
except AttributeError:
d = node.slaveinfo
ver = "%s.%s.%s" % d['version_info'][:3]
node._slaveinfocache = s = "[%s] %s -- Python %s %s" % (
d['id'], d['sysplatform'], ver, d['executable'])
return s
class BaseReport(object):
def __init__(self, **kw):
self.__dict__.update(kw)
def toterminal(self, out):
if hasattr(self, 'node'):
out.line(getslaveinfoline(self.node))
longrepr = self.longrepr
if longrepr is None:
return
if hasattr(longrepr, 'toterminal'):
longrepr.toterminal(out)
else:
try:
out.line(longrepr)
except UnicodeEncodeError:
out.line("<unprintable longrepr>")
def get_sections(self, prefix):
for name, content in self.sections:
if name.startswith(prefix):
yield prefix, content
passed = property(lambda x: x.outcome == "passed")
failed = property(lambda x: x.outcome == "failed")
skipped = property(lambda x: x.outcome == "skipped")
@property
def fspath(self):
return self.nodeid.split("::")[0]
def pytest_runtest_makereport(item, call):
when = call.when
duration = call.stop-call.start
keywords = dict([(x,1) for x in item.keywords])
excinfo = call.excinfo
sections = []
if not call.excinfo:
outcome = "passed"
longrepr = None
else:
if not isinstance(excinfo, ExceptionInfo):
outcome = "failed"
longrepr = excinfo
elif excinfo.errisinstance(pytest.skip.Exception):
outcome = "skipped"
r = excinfo._getreprcrash()
longrepr = (str(r.path), r.lineno, r.message)
else:
outcome = "failed"
if call.when == "call":
longrepr = item.repr_failure(excinfo)
else: # exception in setup or teardown
longrepr = item._repr_failure_py(excinfo,
style=item.config.option.tbstyle)
for rwhen, key, content in item._report_sections:
sections.append(("Captured %s %s" %(key, rwhen), content))
return TestReport(item.nodeid, item.location,
keywords, outcome, longrepr, when,
sections, duration)
class TestReport(BaseReport):
""" Basic test report object (also used for setup and teardown calls if
they fail).
"""
def __init__(self, nodeid, location, keywords, outcome,
longrepr, when, sections=(), duration=0, **extra):
#: normalized collection node id
self.nodeid = nodeid
#: a (filesystempath, lineno, domaininfo) tuple indicating the
#: actual location of a test item - it might be different from the
#: collected one e.g. if a method is inherited from a different module.
self.location = location
#: a name -> value dictionary containing all keywords and
#: markers associated with a test invocation.
self.keywords = keywords
#: test outcome, always one of "passed", "failed", "skipped".
self.outcome = outcome
#: None or a failure representation.
self.longrepr = longrepr
#: one of 'setup', 'call', 'teardown' to indicate runtest phase.
self.when = when
#: list of (secname, data) extra information which needs to
        #: be marshallable
self.sections = list(sections)
#: time it took to run just the test
self.duration = duration
self.__dict__.update(extra)
def __repr__(self):
return "<TestReport %r when=%r outcome=%r>" % (
self.nodeid, self.when, self.outcome)
class TeardownErrorReport(BaseReport):
outcome = "failed"
when = "teardown"
def __init__(self, longrepr, **extra):
self.longrepr = longrepr
self.sections = []
self.__dict__.update(extra)
def pytest_make_collect_report(collector):
call = CallInfo(collector._memocollect, "memocollect")
longrepr = None
if not call.excinfo:
outcome = "passed"
else:
from _pytest import nose
skip_exceptions = (Skipped,) + nose.get_skip_exceptions()
if call.excinfo.errisinstance(skip_exceptions):
outcome = "skipped"
r = collector._repr_failure_py(call.excinfo, "line").reprcrash
longrepr = (str(r.path), r.lineno, r.message)
else:
outcome = "failed"
errorinfo = collector.repr_failure(call.excinfo)
if not hasattr(errorinfo, "toterminal"):
errorinfo = CollectErrorRepr(errorinfo)
longrepr = errorinfo
rep = CollectReport(collector.nodeid, outcome, longrepr,
getattr(call, 'result', None))
rep.call = call # see collect_one_node
return rep
class CollectReport(BaseReport):
def __init__(self, nodeid, outcome, longrepr, result,
sections=(), **extra):
self.nodeid = nodeid
self.outcome = outcome
self.longrepr = longrepr
self.result = result or []
self.sections = list(sections)
self.__dict__.update(extra)
@property
def location(self):
return (self.fspath, None, self.fspath)
def __repr__(self):
return "<CollectReport %r lenresult=%s outcome=%r>" % (
self.nodeid, len(self.result), self.outcome)
class CollectErrorRepr(TerminalRepr):
def __init__(self, msg):
self.longrepr = msg
def toterminal(self, out):
out.line(self.longrepr, red=True)
class SetupState(object):
""" shared state for setting up/tearing down test items or collectors. """
def __init__(self):
self.stack = []
self._finalizers = {}
def addfinalizer(self, finalizer, colitem):
""" attach a finalizer to the given colitem.
if colitem is None, this will add a finalizer that
is called at the end of teardown_all().
"""
assert colitem and not isinstance(colitem, tuple)
assert py.builtin.callable(finalizer)
#assert colitem in self.stack # some unit tests don't setup stack :/
self._finalizers.setdefault(colitem, []).append(finalizer)
def _pop_and_teardown(self):
colitem = self.stack.pop()
self._teardown_with_finalization(colitem)
def _callfinalizers(self, colitem):
finalizers = self._finalizers.pop(colitem, None)
exc = None
while finalizers:
fin = finalizers.pop()
try:
fin()
except Exception:
# XXX Only first exception will be seen by user,
# ideally all should be reported.
if exc is None:
exc = sys.exc_info()
if exc:
py.builtin._reraise(*exc)
def _teardown_with_finalization(self, colitem):
self._callfinalizers(colitem)
if hasattr(colitem, "teardown"):
colitem.teardown()
for colitem in self._finalizers:
assert colitem is None or colitem in self.stack \
or isinstance(colitem, tuple)
def teardown_all(self):
while self.stack:
self._pop_and_teardown()
for key in list(self._finalizers):
self._teardown_with_finalization(key)
assert not self._finalizers
def teardown_exact(self, item, nextitem):
needed_collectors = nextitem and nextitem.listchain() or []
self._teardown_towards(needed_collectors)
def _teardown_towards(self, needed_collectors):
while self.stack:
if self.stack == needed_collectors[:len(self.stack)]:
break
self._pop_and_teardown()
def prepare(self, colitem):
""" setup objects along the collector chain to the test-method
and teardown previously setup objects."""
needed_collectors = colitem.listchain()
self._teardown_towards(needed_collectors)
# check if the last collection node has raised an error
for col in self.stack:
if hasattr(col, '_prepare_exc'):
py.builtin._reraise(*col._prepare_exc)
for col in needed_collectors[len(self.stack):]:
self.stack.append(col)
try:
col.setup()
except Exception:
col._prepare_exc = sys.exc_info()
raise
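# Worked example (sketch) of the prepare/teardown interplay: after running
# Module::Class::test_a the stack is [Module, Class, test_a]. If the next item
# is Module::Class::test_b, its listchain() shares the [Module, Class] prefix,
# so only test_a is popped and torn down; Module and Class stay set up. An
# item from a different module would unwind the whole stack first.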
def collect_one_node(collector):
ihook = collector.ihook
ihook.pytest_collectstart(collector=collector)
rep = ihook.pytest_make_collect_report(collector=collector)
call = rep.__dict__.pop("call", None)
if call and check_interactive_exception(call, rep):
ihook.pytest_exception_interact(node=collector, call=call, report=rep)
return rep
# =============================================================
# Test OutcomeExceptions and helpers for creating them.
class OutcomeException(Exception):
""" OutcomeException and its subclass instances indicate and
contain info about test and collection outcomes.
"""
def __init__(self, msg=None, pytrace=True):
Exception.__init__(self, msg)
self.msg = msg
self.pytrace = pytrace
def __repr__(self):
if self.msg:
val = self.msg
if isinstance(val, bytes):
val = py._builtin._totext(val, errors='replace')
return val
return "<%s instance>" %(self.__class__.__name__,)
__str__ = __repr__
class Skipped(OutcomeException):
# XXX hackish: on 3k we fake to live in the builtins
# in order to have Skipped exception printing shorter/nicer
__module__ = 'builtins'
class Failed(OutcomeException):
""" raised from an explicit call to pytest.fail() """
__module__ = 'builtins'
class Exit(KeyboardInterrupt):
""" raised for immediate program exits (no tracebacks/summaries)"""
def __init__(self, msg="unknown reason"):
self.msg = msg
KeyboardInterrupt.__init__(self, msg)
# exposed helper methods
def exit(msg):
""" exit testing process as if KeyboardInterrupt was triggered. """
__tracebackhide__ = True
raise Exit(msg)
exit.Exception = Exit
def skip(msg=""):
""" skip an executing test with the given message. Note: it's usually
better to use the pytest.mark.skipif marker to declare a test to be
skipped under certain conditions like mismatching platforms or
dependencies. See the pytest_skipping plugin for details.
"""
__tracebackhide__ = True
raise Skipped(msg=msg)
skip.Exception = Skipped
def fail(msg="", pytrace=True):
""" explicitly fail an currently-executing test with the given Message.
:arg pytrace: if false the msg represents the full failure information
and no python traceback will be reported.
"""
__tracebackhide__ = True
raise Failed(msg=msg, pytrace=pytrace)
fail.Exception = Failed
def importorskip(modname, minversion=None):
""" return imported module if it has at least "minversion" as its
    __version__ attribute. If no minversion is specified, a skip
    is only triggered if the module cannot be imported.
"""
__tracebackhide__ = True
    compile(modname, '', 'eval')  # to catch syntax errors
try:
__import__(modname)
except ImportError:
skip("could not import %r" %(modname,))
mod = sys.modules[modname]
if minversion is None:
return mod
verattr = getattr(mod, '__version__', None)
if minversion is not None:
try:
from pkg_resources import parse_version as pv
except ImportError:
skip("we have a required version for %r but can not import "
"no pkg_resources to parse version strings." %(modname,))
if verattr is None or pv(verattr) < pv(minversion):
skip("module %r has __version__ %r, required is: %r" %(
modname, verattr, minversion))
return mod
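# Usage sketch (the module name and version are examples only):
#   docutils = importorskip("docutils", minversion="0.3")
# skips the calling test when docutils is missing or older than 0.3 and
# returns the imported module otherwise.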
|
jfinkels/networkx | refs/heads/master | networkx/algorithms/tests/test_richclub.py | 11 | import networkx as nx
from nose.tools import *
def test_richclub():
G = nx.Graph([(0,1),(0,2),(1,2),(1,3),(1,4),(4,5)])
rc = nx.richclub.rich_club_coefficient(G,normalized=False)
assert_equal(rc,{0: 12.0/30,1:8.0/12})
# test single value
rc0 = nx.richclub.rich_club_coefficient(G,normalized=False)[0]
assert_equal(rc0,12.0/30.0)
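# Worked arithmetic for the expected values above (sketch):
# phi(k) = 2*E_k / (N_k*(N_k-1)), with N_k the nodes of degree > k and E_k the
# edges among them. k=0: 6 nodes, 6 edges -> 2*6/(6*5) = 12.0/30;
# k=1: nodes {0, 1, 2, 4} with 4 edges -> 2*4/(4*3) = 8.0/12.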
def test_richclub_normalized():
G = nx.Graph([(0,1),(0,2),(1,2),(1,3),(1,4),(4,5)])
rcNorm = nx.richclub.rich_club_coefficient(G,Q=2)
assert_equal(rcNorm,{0:1.0,1:1.0})
def test_richclub2():
T = nx.balanced_tree(2,10)
rc = nx.richclub.rich_club_coefficient(T,normalized=False)
assert_equal(rc,{0:4092/(2047*2046.0),
1:(2044.0/(1023*1022)),
2:(2040.0/(1022*1021))})
def test_richclub3():
#tests edgecase
G = nx.karate_club_graph()
rc = nx.rich_club_coefficient(G,normalized=False)
assert_equal(rc,{0:156.0/1122,
1:154.0/1056,
2:110.0/462,
3:78.0/240,
4:44.0/90,
5:22.0/42,
6:10.0/20,
7:10.0/20,
8:10.0/20,
9:6.0/12,
10:2.0/6,
11:2.0/6,
12:0.0,
13:0.0,
14:0.0,
15:0.0,})
def test_richclub4():
G = nx.Graph()
G.add_edges_from([(0,1),(0,2),(0,3),(0,4),(4,5),(5,9),(6,9),(7,9),(8,9)])
rc = nx.rich_club_coefficient(G,normalized=False)
assert_equal(rc,{0:18/90.0,
1:6/12.0,
2:0.0,
3:0.0})
@raises(nx.NetworkXNotImplemented)
def test_richclub_exception():
G = nx.DiGraph()
nx.rich_club_coefficient(G)
@raises(nx.NetworkXNotImplemented)
def test_rich_club_exception2():
G = nx.MultiGraph()
nx.rich_club_coefficient(G)
#def test_richclub2_normalized():
# T = nx.balanced_tree(2,10)
# rcNorm = nx.richclub.rich_club_coefficient(T,Q=2)
# assert_true(rcNorm[0] ==1.0 and rcNorm[1] < 0.9 and rcNorm[2] < 0.9)
|
tensorflow/neural-structured-learning | refs/heads/master | research/carls/candidate_sampling_ops.py | 1 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Candidate sampling related ops."""
import typing
from research.carls import context
from research.carls import dynamic_embedding_config_pb2 as de_config_pb2
from research.carls.kernels import gen_carls_ops
import tensorflow as tf
def top_k(inputs: tf.Tensor,
k: int,
de_config: de_config_pb2.DynamicEmbeddingConfig,
var_name: typing.Text,
service_address: typing.Text = "",
timeout_ms: int = -1):
"""Computes logits for the top k closest embeddings to the inputs.
Args:
inputs: A float `Tensor` of shape `[batch_size, dim]` representing the
forward activations of the input network.
k: An `int` denoting the number of returned keys.
de_config: A DynamicEmbeddingConfig for configuring the dynamic embedding.
var_name: A unique name for the operation.
service_address: The address of a dynamic embedding service. If empty, the
value passed from --kbs_address flag will be used instead.
    timeout_ms: Timeout in milliseconds for the connection. If negative, never
      time out.
Returns:
keys: A string `Tensor` of shape `[batch_size, k]` representing the top k
keys relative to the input.
logits: A float `Tensor` of shape `[batch_size, k]` representing the logits
for the returned keys.
Raises:
ValueError: if k is not greater than zero.
Note: The (keys, logits) pair returned here should not be used for training as
they only represent biased sampling. Instead, use sampled_softmax_loss()
for training.
"""
if not var_name:
raise ValueError("Must specify a valid var_name.")
if k <= 0:
raise ValueError("k must be greater than zero, got %d" % k)
context.add_to_collection(var_name, de_config)
resource = gen_carls_ops.dynamic_embedding_manager_resource(
de_config.SerializeToString(), var_name, service_address, timeout_ms)
return gen_carls_ops.topk_lookup(inputs, k, resource)
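# Usage sketch (illustrative; config construction elided, var_name arbitrary):
#   keys, logits = top_k(activations, k=10, de_config=config, var_name='emb')
#   # keys: string Tensor of shape [batch_size, 10]; logits: matching scores.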
def sampled_softmax_loss(positive_keys: tf.Tensor,
inputs: tf.Tensor,
num_samples: int,
de_config: de_config_pb2.DynamicEmbeddingConfig,
var_name: typing.Text,
service_address: typing.Text = "",
timeout_ms: int = -1):
"""Compute sampled Softmax loss from given input activations.
Args:
positive_keys: A string `Tensor` of shape `[batch_size, None]` representing
input positive keys.
inputs: A float `Tensor` of shape `[batch_size, dim]`, representing the
forward activations of the input network.
num_samples: An int denoting the returned positive and negative samples.
de_config: A DynamicEmbeddingConfig for configuring the dynamic embedding.
var_name: A unique name for the operation.
service_address: The address of a dynamic embedding service. If empty, the
value passed from --kbs_address flag will be used instead.
    timeout_ms: Timeout in milliseconds for the connection. If negative, never
      time out.
Returns:
A float `Tensor` representing the sampled softmax loss.
"""
logits, labels, _, mask, _ = compute_sampled_logits(positive_keys, inputs,
num_samples, de_config,
var_name, service_address,
timeout_ms)
tiled_norm = tf.tile(
tf.maximum(tf.reduce_sum(labels, -1, keepdims=True), 1),
[1, labels.get_shape()[-1]])
labels /= tiled_norm
return tf.reduce_sum(
tf.nn.softmax_cross_entropy_with_logits_v2(
labels=labels, logits=logits)) / tf.reduce_sum(mask)
def sampled_sigmoid_loss(positive_keys: tf.Tensor,
inputs: tf.Tensor,
num_samples: int,
de_config: de_config_pb2.DynamicEmbeddingConfig,
var_name: typing.Text,
service_address: typing.Text = "",
timeout_ms: int = -1):
"""Compute sampled sigmoid loss from given input activations.
Args:
positive_keys: A string `Tensor` of shape `[batch_size, None]` representing
input positive keys.
inputs: A float `Tensor` of shape `[batch_size, dim]`, representing the
forward activations of the input network.
num_samples: An int denoting the returned positive and negative samples.
de_config: A DynamicEmbeddingConfig for configuring the dynamic embedding.
var_name: A unique name for the operation.
service_address: The address of a dynamic embedding service. If empty, the
value passed from --kbs_address flag will be used instead.
    timeout_ms: Timeout in milliseconds for the connection. If negative, never
      time out.
Returns:
A float `Tensor` representing the sampled sigmoid loss.
"""
logits, labels, _, mask, _ = compute_sampled_logits(positive_keys, inputs,
num_samples, de_config,
var_name, service_address,
timeout_ms)
tiled_norm = tf.tile(
tf.maximum(tf.reduce_sum(labels, -1, keepdims=True), 1),
[1, labels.get_shape()[-1]])
labels /= tiled_norm
reduced_sum = tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(
labels=labels, logits=logits)) / tf.reduce_sum(mask)
return reduced_sum / num_samples
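# Usage sketch for the sampled losses (tensor names and values below are
# placeholders): the returned scalar is already normalized by the batch mask,
# so it can be minimized directly.
#   loss = sampled_softmax_loss(positive_keys=label_keys, inputs=hidden,
#                               num_samples=64, de_config=config,
#                               var_name='softmax_emb')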
def compute_sampled_logits(positive_keys,
inputs,
num_samples: int,
de_config: de_config_pb2.DynamicEmbeddingConfig,
var_name: typing.Text,
service_address: typing.Text = "",
timeout_ms: int = -1):
"""Computes sampled logits from given positive labels.
Args:
positive_keys: A string `Tensor` of shape `[batch_size, None]` representing
input positive keys.
inputs: A float `Tensor` of shape `[batch_size, dim]` representing the
forward activations of the input network.
num_samples: An int denoting the returned positive and negative samples.
de_config: A DynamicEmbeddingConfig for configuring the dynamic embedding.
var_name: A unique name for the operation.
service_address: The address of a dynamic embedding service. If empty, the
value passed from --kbs_address flag will be used instead.
    timeout_ms: Timeout in milliseconds for the connection. If negative, never
      time out.
Returns:
logits: A float `Tensor` of shape `[batch_size, num_samples]` representing
the logits for sampled labels.
labels: A float `Tensor` of shape `[batch_size, num_samples]` with values
in {0, 1} indicating if the sample is positive or negative.
keys: A string `Tensor` of shape `[batch_size, num_samples]` representing
the keys for each sample.
mask: A float `Tensor` of shape `[batch_size]` representing the 0/1 mask
of each batch. For example, if all keys in positive_keys[i] are empty,
mask[i] = 0; otherwise mask[i] = 1.
weights: A float `Tensor` representing the embeddings of the sampled keys.
Raises:
ValueError: If var_name is not specified.
TypeError: If de_config is an instance of DynamicEmbeddingConfig.
"""
if not var_name:
raise ValueError("Must specify a valid name, got %s" % var_name)
if num_samples < 1:
raise ValueError("Invalid num_samples: %d" % num_samples)
context.add_to_collection(var_name, de_config)
resource = gen_carls_ops.dynamic_embedding_manager_resource(
de_config.SerializeToString(), var_name, service_address, timeout_ms)
# Create a dummy variable so that the gradients can be passed in.
grad_placeholder = tf.Variable(0.0)
keys, labels, expected_counts, mask, weights = (
gen_carls_ops.sampled_logits_lookup(positive_keys, inputs, num_samples,
grad_placeholder, resource))
# Compute sampled logits.
# Shape of weights: [d1, d2, dn-1, num_samples, embed_dim]
# Shape of inputs: [d1, d2, dn-1, embed_dim]
# Shape of output logits: [d1, d2, dn-1, num_samples]
# [d1, d2, dn-1, embed_dim] -> [d1, d2, dn-1, 1, embed_dim]
tiled_inputs = tf.expand_dims(inputs, axis=-2)
# [d1, d2, dn-1, embed_dim] -> [d1, d2, dn-1, num_samples, embed_dim]
multiples = [1] * (inputs.ndim + 1)
multiples[-2] = num_samples
tiled_inputs = tf.tile(tiled_inputs, multiples)
# [d1, d2, dn-1, num_samples, embed_dim] -> [d1, d2, dn-1, num_samples]
logits = tf.reduce_sum(weights * tiled_inputs, -1)
# Sampled logits.
logits -= tf.math.log(expected_counts)
return logits, labels, keys, mask, weights
@tf.RegisterGradient("SampledLogitsLookup")
def _sampled_logits_lookup_grad(op, keys_grad, labels_grad,
expected_counts_grad, mask_grad, weights_grad):
"""Computes the gradients for SampledLogitsLookup.
  We use the gradients w.r.t. the weights output of sampled_logits_lookup() to
update the embeddings/weights of the sampled keys.
The gradients for the inputs of sampled_logits_lookup should be provided, but
none of them needs to be back-propagated. So we set all of them to be zeros.
Args:
op: The DynamicEmbeddingLookup op.
keys_grad: The tensor representing the gradient w.r.t. the keys output.
labels_grad: The tensor representing the gradient w.r.t. the labels output.
expected_counts_grad: The tensor representing the gradient w.r.t. the
expected_counts output.
mask_grad: The tensor representing the gradient w.r.t. the mask output.
weights_grad: The tensor representing the gradient w.r.t. the weights
output.
Returns:
The gradients w.r.t. the input.
"""
del keys_grad, labels_grad, expected_counts_grad, mask_grad # Unused.
pos_keys_grad, num_samples_grad, dummy_variable_grad, resource_grad = (
gen_carls_ops.sampled_logits_lookup_grad(
keys=op.outputs[0],
weight_gradients=weights_grad,
handle=op.inputs[4]))
# Gradient for the input activation.
inputs_grad = tf.zeros_like(op.inputs[1])
return (pos_keys_grad, inputs_grad, num_samples_grad, dummy_variable_grad,
resource_grad)
|
mattt416/neutron | refs/heads/master | neutron/tests/unit/agent/metadata/test_driver.py | 16 | # Copyright 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_utils import uuidutils
from neutron.agent.common import config as agent_config
from neutron.agent.l3 import agent as l3_agent
from neutron.agent.l3 import config as l3_config
from neutron.agent.l3 import ha as l3_ha_agent
from neutron.agent.metadata import config
from neutron.agent.metadata import driver as metadata_driver
from neutron.common import constants
from neutron.tests import base
_uuid = uuidutils.generate_uuid
class TestMetadataDriverRules(base.BaseTestCase):
def test_metadata_nat_rules(self):
rules = ('PREROUTING', '-d 169.254.169.254/32 -i qr-+ '
'-p tcp -m tcp --dport 80 -j REDIRECT --to-port 8775')
self.assertEqual(
[rules],
metadata_driver.MetadataDriver.metadata_nat_rules(8775))
def test_metadata_filter_rules(self):
rules = [('INPUT', '-m mark --mark 0x1/%s -j ACCEPT' %
constants.ROUTER_MARK_MASK),
('INPUT', '-p tcp -m tcp --dport 8775 -j DROP')]
self.assertEqual(
rules,
metadata_driver.MetadataDriver.metadata_filter_rules(8775, '0x1'))
def test_metadata_mangle_rules(self):
rule = ('PREROUTING', '-d 169.254.169.254/32 -i qr-+ '
'-p tcp -m tcp --dport 80 '
'-j MARK --set-xmark 0x1/%s' %
constants.ROUTER_MARK_MASK)
self.assertEqual(
[rule],
metadata_driver.MetadataDriver.metadata_mangle_rules('0x1'))
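# For reference (sketch): once installed by the l3 agent, the NAT rule tested
# above renders to roughly this iptables line (the wrapping chain name is an
# assumption):
#   -A neutron-l3-agent-PREROUTING -d 169.254.169.254/32 -i qr-+ -p tcp \
#     -m tcp --dport 80 -j REDIRECT --to-port 8775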
class TestMetadataDriverProcess(base.BaseTestCase):
EUID = 123
EGID = 456
EUNAME = 'neutron'
def setUp(self):
super(TestMetadataDriverProcess, self).setUp()
mock.patch('eventlet.spawn').start()
agent_config.register_interface_driver_opts_helper(cfg.CONF)
cfg.CONF.set_override('interface_driver',
'neutron.agent.linux.interface.NullDriver')
agent_config.register_use_namespaces_opts_helper(cfg.CONF)
mock.patch('neutron.agent.l3.agent.L3PluginApi').start()
mock.patch('neutron.agent.l3.ha.AgentMixin'
'._init_ha_conf_path').start()
cfg.CONF.register_opts(l3_config.OPTS)
cfg.CONF.register_opts(l3_ha_agent.OPTS)
cfg.CONF.register_opts(config.SHARED_OPTS)
cfg.CONF.register_opts(config.DRIVER_OPTS)
def _test_spawn_metadata_proxy(self, expected_user, expected_group,
user='', group='', watch_log=True):
router_id = _uuid()
router_ns = 'qrouter-%s' % router_id
metadata_port = 8080
ip_class_path = 'neutron.agent.linux.ip_lib.IPWrapper'
is_effective_user = 'neutron.agent.linux.utils.is_effective_user'
fake_is_effective_user = lambda x: x in [self.EUNAME, str(self.EUID)]
cfg.CONF.set_override('metadata_proxy_user', user)
cfg.CONF.set_override('metadata_proxy_group', group)
cfg.CONF.set_override('log_file', 'test.log')
cfg.CONF.set_override('debug', True)
agent = l3_agent.L3NATAgent('localhost')
with mock.patch('os.geteuid', return_value=self.EUID),\
mock.patch('os.getegid', return_value=self.EGID),\
mock.patch(is_effective_user,
side_effect=fake_is_effective_user),\
mock.patch(ip_class_path) as ip_mock:
agent.metadata_driver.spawn_monitored_metadata_proxy(
agent.process_monitor,
router_ns,
metadata_port,
agent.conf,
router_id=router_id)
netns_execute_args = [
'neutron-ns-metadata-proxy',
mock.ANY,
mock.ANY,
'--router_id=%s' % router_id,
mock.ANY,
'--metadata_port=%s' % metadata_port,
'--metadata_proxy_user=%s' % expected_user,
'--metadata_proxy_group=%s' % expected_group,
'--debug',
'--verbose',
'--log-file=neutron-ns-metadata-proxy-%s.log' %
router_id]
if not watch_log:
netns_execute_args.append(
'--nometadata_proxy_watch_log')
ip_mock.assert_has_calls([
mock.call(namespace=router_ns),
mock.call().netns.execute(netns_execute_args, addl_env=None,
run_as_root=False)
])
def test_spawn_metadata_proxy_with_agent_user(self):
self._test_spawn_metadata_proxy(
self.EUNAME, str(self.EGID), user=self.EUNAME)
def test_spawn_metadata_proxy_with_nonagent_user(self):
self._test_spawn_metadata_proxy(
'notneutron', str(self.EGID), user='notneutron', watch_log=False)
def test_spawn_metadata_proxy_with_agent_uid(self):
self._test_spawn_metadata_proxy(
str(self.EUID), str(self.EGID), user=str(self.EUID))
def test_spawn_metadata_proxy_with_nonagent_uid(self):
self._test_spawn_metadata_proxy(
'321', str(self.EGID), user='321', watch_log=False)
def test_spawn_metadata_proxy_with_group(self):
self._test_spawn_metadata_proxy(str(self.EUID), 'group', group='group')
def test_spawn_metadata_proxy_with_gid(self):
self._test_spawn_metadata_proxy(str(self.EUID), '654', group='654')
def test_spawn_metadata_proxy(self):
self._test_spawn_metadata_proxy(str(self.EUID), str(self.EGID))
|
jallohm/django | refs/heads/master | django/conf/locale/tr/formats.py | 504 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'd F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'd F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'd F'
SHORT_DATE_FORMAT = 'd M Y'
SHORT_DATETIME_FORMAT = 'd M Y H:i'
FIRST_DAY_OF_WEEK = 1  # Pazartesi (Monday)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
'%y-%m-%d', # '06-10-25'
# '%d %B %Y', '%d %b. %Y', # '25 Ekim 2006', '25 Eki. 2006'
]
DATETIME_INPUT_FORMATS = [
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
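# Usage sketch: with the 'tr' locale active and localization enabled, Django
# applies these patterns automatically, e.g. (illustrative):
#   >>> from django.utils import formats
#   >>> formats.date_format(datetime.date(2006, 10, 25))
#   u'25 Ekim 2006'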
|
diofeher/django-nfa | refs/heads/master | django/contrib/localflavor/nl/forms.py | 2 | """
NL-specific Form helpers
"""
import re
from django.newforms import ValidationError
from django.newforms.fields import Field, Select, EMPTY_VALUES
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode
pc_re = re.compile('^\d{4}[A-Z]{2}$')
sofi_re = re.compile('^\d{9}$')
numeric_re = re.compile('^\d+$')
class NLZipCodeField(Field):
"""
A Dutch postal code field.
"""
default_error_messages = {
'invalid': _('Enter a valid postal code'),
}
def clean(self, value):
super(NLZipCodeField, self).clean(value)
if value in EMPTY_VALUES:
return u''
value = value.strip().upper().replace(' ', '')
if not pc_re.search(value):
raise ValidationError(self.error_messages['invalid'])
if int(value[:4]) < 1000:
raise ValidationError(self.error_messages['invalid'])
return u'%s %s' % (value[:4], value[4:])
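    # Worked example (sketch): clean(u' 1234 ab ') strips and uppercases to
    # '1234AB', passes the regex and the >= 1000 range check, and returns
    # u'1234 AB'; '0999XX' matches the regex but fails the range check.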
class NLProvinceSelect(Select):
"""
A Select widget that uses a list of provinces of the Netherlands as its
choices.
"""
def __init__(self, attrs=None):
from nl_provinces import PROVINCE_CHOICES
super(NLProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class NLPhoneNumberField(Field):
"""
A Dutch telephone number field.
"""
default_error_messages = {
'invalid': _('Enter a valid phone number'),
}
def clean(self, value):
super(NLPhoneNumberField, self).clean(value)
if value in EMPTY_VALUES:
return u''
phone_nr = re.sub('[\-\s\(\)]', '', smart_unicode(value))
if len(phone_nr) == 10 and numeric_re.search(phone_nr):
return value
if phone_nr[:3] == '+31' and len(phone_nr) == 12 and \
numeric_re.search(phone_nr[3:]):
return value
raise ValidationError(self.error_messages['invalid'])
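    # Worked example (sketch): clean(u'012-345 6789') reduces to '0123456789'
    # (10 digits) and is accepted; clean(u'+31 12 345 6789') reduces to 12
    # characters starting with '+31' and is accepted as well.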
class NLSoFiNumberField(Field):
"""
A Dutch social security number (SoFi/BSN) field.
http://nl.wikipedia.org/wiki/Sofinummer
"""
default_error_messages = {
'invalid': _('Enter a valid SoFi number'),
}
def clean(self, value):
super(NLSoFiNumberField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if not sofi_re.search(value):
raise ValidationError(self.error_messages['invalid'])
if int(value) == 0:
raise ValidationError(self.error_messages['invalid'])
checksum = 0
for i in range(9, 1, -1):
checksum += int(value[9-i]) * i
checksum -= int(value[-1])
if checksum % 11 != 0:
raise ValidationError(self.error_messages['invalid'])
return value
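    # Worked example (sketch) of the 11-test for value = '111222333':
    # 1*9 + 1*8 + 1*7 + 2*6 + 2*5 + 2*4 + 3*3 + 3*2 = 69; 69 - 3 = 66,
    # and 66 % 11 == 0, so the number is accepted.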
|
buddycloud/buddycloud-iOS-client | refs/heads/master | src/External_Libs/three20/src/scripts/Pbxproj.py | 1 | #!/usr/bin/env python
# encoding: utf-8
"""
Pbxproj.py
Working with the pbxproj file format is a pain in the ass.
This object provides a couple basic features for parsing pbxproj files:
* Getting a dependency list
* Adding one pbxproj to another pbxproj as a dependency
Version 1.0.
History:
1.0 - October 20, 2010: Initial hacked-together version finished. It is alive!
Created by Jeff Verkoeyen on 2010-10-18.
Copyright 2009-2010 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import hashlib
import logging
import os
import re
import sys
import Paths
pbxproj_cache = {}
# The following relative path methods are recycled from:
# http://code.activestate.com/recipes/208993-compute-relative-path-from-one-directory-to-anothe/
# Author: Cimarron Taylor
# Date: July 6, 2003
def pathsplit(p, rest=[]):
(h,t) = os.path.split(p)
if len(h) < 1: return [t]+rest
if len(t) < 1: return [h]+rest
return pathsplit(h,[t]+rest)
def commonpath(l1, l2, common=[]):
if len(l1) < 1: return (common, l1, l2)
if len(l2) < 1: return (common, l1, l2)
if l1[0] != l2[0]: return (common, l1, l2)
return commonpath(l1[1:], l2[1:], common+[l1[0]])
def relpath(p1, p2):
(common,l1,l2) = commonpath(pathsplit(p1), pathsplit(p2))
p = []
if len(l1) > 0:
p = [ '../' * len(l1) ]
p = p + l2
return os.path.join( *p )
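# Worked example (sketch): relpath('/projects/app', '/projects/libs/three20')
# splits both paths, drops the common ['/', 'projects'] prefix, and returns
# '../libs/three20'.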
class Pbxproj(object):
@staticmethod
def get_pbxproj_by_name(name):
if name not in pbxproj_cache:
pbxproj_cache[name] = Pbxproj(name)
return pbxproj_cache[name]
# Valid names
# Three20
# Three20:Three20-Xcode3.2.5
# /path/to/project.xcodeproj/project.pbxproj
def __init__(self, name):
self._project_data = None
parts = name.split(':')
self.name = parts[0]
if len(parts) > 1:
self.target = parts[1]
else:
if re.match('^[a-zA-Z0-9\.\-:+"]+$', self.name):
self.target = self.name
else:
result = re.search('([a-zA-Z0-9\.\-+"]+)\.xcodeproj', self.name)
if not result:
self.target = self.name
else:
(self.target, ) = result.groups()
match = re.search('([a-zA-Z0-9\.\-+"]+)\.xcodeproj', self.name)
if not match:
self._project_name = self.name
else:
(self._project_name, ) = match.groups()
self._guid = None
self._deps = None
self.guid()
def __str__(self):
return str(self.name)+" target:"+str(self.target)+" guid:"+str(self._guid)+" prodguid: "+self._product_guid+" prodname: "+self._product_name
def uniqueid(self):
return self.name + ':' + self.target
def path(self):
# TODO: No sense calculating this every time, just store it when we get the name.
if re.match('^[a-zA-Z0-9\.\-:+"]+$', self.name):
return os.path.join(Paths.src_dir, self.name.strip('"'), self.name.strip('"')+'.xcodeproj', 'project.pbxproj')
elif not re.match('project.pbxproj$', self.name):
return os.path.join(self.name, 'project.pbxproj')
else:
return self.name
# A pbxproj file is contained within an xcodeproj file.
# This method simply strips off the project.pbxproj part of the path.
def xcodeprojpath(self):
return os.path.dirname(self.path())
def guid(self):
if not self._guid:
self.dependencies()
return self._guid
# Load the project data from disk.
def get_project_data(self):
if self._project_data is None:
if not os.path.exists(self.path()):
logging.info("Couldn't find the project at this path:")
logging.info(self.path())
return None
project_file = open(self.path(), 'r')
self._project_data = project_file.read()
return self._project_data
# Write the project data to disk.
def set_project_data(self, project_data):
if self._project_data != project_data:
self._project_data = project_data
project_file = open(self.path(), 'w')
project_file.write(self._project_data)
# Get and cache the dependencies for this project.
def dependencies(self):
if self._deps is not None:
return self._deps
project_data = self.get_project_data()
if project_data is None:
return None
result = re.search('([A-Z0-9]+) \/\* '+re.escape(self.target)+' \*\/ = {\n[ \t]+isa = PBXNativeTarget;(?:.|\n)+?buildPhases = \(\n((?:.|\n)+?)\);\n(?:.|\n)+?dependencies = \(\n((?:[ \t]+[A-Z0-9]+ \/\* PBXTargetDependency \*\/,\n)*)[ \t]*\);\n(?:.|\n)+?productReference = ([A-Z0-9]+) \/\* (.+?) \*\/;',
project_data)
if not result:
return None
(self._guid, buildPhases, dependency_set, self._product_guid, self._product_name, ) = result.groups()
dependency_guids = re.findall('[ \t]+([A-Z0-9]+) \/\* PBXTargetDependency \*\/,\n', dependency_set)
match = re.search('([A-Z0-9]+) \/\* Resources \*\/', buildPhases)
if match:
(self._resources_guid, ) = match.groups()
else:
self._resources_guid = None
match = re.search('([A-Z0-9]+) \/\* Frameworks \*\/', buildPhases)
if not match:
logging.error("Couldn't find the Frameworks phase.")
return None
(self._frameworks_guid, ) = match.groups()
if not result:
return None
dependency_names = []
for guid in dependency_guids:
result = re.search(guid+' \/\* PBXTargetDependency \*\/ = \{\n[ \t]+isa = PBXTargetDependency;\n[ \t]*name = (["a-zA-Z0-9\.\-]+);',
project_data)
if result:
(dependency_name, ) = result.groups()
dependency_names.append(dependency_name)
self._deps = dependency_names
return self._deps
# Add a line to the PBXBuildFile section.
#
# <default_guid> /* <name> in Frameworks */ = {isa = PBXBuildFile; fileRef = <file_ref_hash> /* <name> */; };
#
# Returns: <default_guid> if a line was added.
# Otherwise, the existing guid is returned.
def add_buildfile(self, name, file_ref_hash, default_guid):
project_data = self.get_project_data()
match = re.search('\/\* Begin PBXBuildFile section \*\/\n((?:.|\n)+?)\/\* End PBXBuildFile section \*\/', project_data)
if not match:
logging.error("Couldn't find PBXBuildFile section.")
return None
(subtext, ) = match.groups()
buildfile_hash = None
match = re.search('([A-Z0-9]+).+?fileRef = '+re.escape(file_ref_hash), subtext)
if match:
(buildfile_hash, ) = match.groups()
logging.info("This build file already exists: "+buildfile_hash)
if buildfile_hash is None:
match = re.search('\/\* Begin PBXBuildFile section \*\/\n', project_data)
buildfile_hash = default_guid
libfiletext = "\t\t"+buildfile_hash+" /* "+name+" in Frameworks */ = {isa = PBXBuildFile; fileRef = "+file_ref_hash+" /* "+name+" */; };\n"
project_data = project_data[:match.end()] + libfiletext + project_data[match.end():]
self.set_project_data(project_data)
return buildfile_hash
# Add a line to the PBXFileReference section.
#
# <default_guid> /* <name> */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.<file_type>"; name = <name>; path = <rel_path>; sourceTree = <source_tree>; };
#
# Returns: <default_guid> if a line was added.
# Otherwise, the existing guid is returned.
def add_filereference(self, name, file_type, default_guid, rel_path, source_tree):
project_data = self.get_project_data()
fileref_hash = None
match = re.search('([A-Z0-9]+) \/\* '+re.escape(name)+' \*\/ = \{isa = PBXFileReference; lastKnownFileType = "wrapper.'+file_type+'"; name = '+re.escape(name)+'; path = '+re.escape(rel_path)+';', project_data)
if match:
logging.info("This file has already been added.")
(fileref_hash, ) = match.groups()
else:
match = re.search('\/\* Begin PBXFileReference section \*\/\n', project_data)
if not match:
logging.error("Couldn't find the PBXFileReference section.")
return False
fileref_hash = default_guid
pbxfileref = "\t\t"+fileref_hash+" /* "+name+" */ = {isa = PBXFileReference; lastKnownFileType = \"wrapper."+file_type+"\"; name = "+name+"; path = "+rel_path+"; sourceTree = "+source_tree+"; };\n"
project_data = project_data[:match.end()] + pbxfileref + project_data[match.end():]
self.set_project_data(project_data)
return fileref_hash
# Add a file to the given PBXGroup.
#
# <guid> /* <name> */,
def add_file_to_group(self, name, guid, group):
project_data = self.get_project_data()
match = re.search('\/\* '+re.escape(group)+' \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n((?:.|\n)+?)\);', project_data)
if not match:
logging.error("Couldn't find the "+group+" children.")
return False
(children,) = match.groups()
match = re.search(re.escape(guid), children)
if match:
logging.info("This file is already a member of the "+name+" group.")
else:
match = re.search('\/\* '+re.escape(group)+' \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n', project_data)
if not match:
logging.error("Couldn't find the "+group+" group.")
return False
pbxgroup = "\t\t\t\t"+guid+" /* "+name+" */,\n"
project_data = project_data[:match.end()] + pbxgroup + project_data[match.end():]
self.set_project_data(project_data)
return True
# Add a file to the Frameworks PBXGroup.
#
# <guid> /* <name> */,
def add_file_to_frameworks(self, name, guid):
return self.add_file_to_group(name, guid, 'Frameworks')
# Add a file to the Resources PBXGroup.
#
# <guid> /* <name> */,
def add_file_to_resources(self, name, guid):
return self.add_file_to_group(name, guid, 'Resources')
def add_file_to_phase(self, name, guid, phase_guid, phase):
project_data = self.get_project_data()
match = re.search(re.escape(phase_guid)+" \/\* "+re.escape(phase)+" \*\/ = {(?:.|\n)+?files = \(((?:.|\n)+?)\);", project_data)
if not match:
logging.error("Couldn't find the "+phase+" phase.")
return False
(files, ) = match.groups()
match = re.search(re.escape(guid), files)
if match:
logging.info("The file has already been added.")
else:
match = re.search(re.escape(phase_guid)+" \/\* "+phase+" \*\/ = {(?:.|\n)+?files = \(\n", project_data)
if not match:
logging.error("Couldn't find the "+phase+" files")
return False
frameworktext = "\t\t\t\t"+guid+" /* "+name+" in "+phase+" */,\n"
project_data = project_data[:match.end()] + frameworktext + project_data[match.end():]
self.set_project_data(project_data)
return True
def get_rel_path_to_products_dir(self):
project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
build_path = os.path.join(os.path.join(os.path.dirname(Paths.src_dir), 'Build'), 'Products')
return relpath(project_path, build_path)
def add_file_to_frameworks_phase(self, name, guid):
return self.add_file_to_phase(name, guid, self._frameworks_guid, 'Frameworks')
def add_file_to_resources_phase(self, name, guid):
if self._resources_guid is None:
logging.error("No resources build phase found in the destination project")
return False
return self.add_file_to_phase(name, guid, self._resources_guid, 'Resources')
def add_header_search_path(self, configuration):
project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
build_path = os.path.join(os.path.join(os.path.join(os.path.dirname(Paths.src_dir), 'Build'), 'Products'), 'three20')
rel_path = relpath(project_path, build_path)
return self.add_build_setting(configuration, 'HEADER_SEARCH_PATHS', '"'+rel_path+'"')
def add_build_setting(self, configuration, setting_name, value):
project_data = self.get_project_data()
match = re.search('\/\* '+configuration+' \*\/ = {\n[ \t]+isa = XCBuildConfiguration;\n[ \t]+buildSettings = \{\n((?:.|\n)+?)\};', project_data)
if not match:
print "Couldn't find this configuration."
return False
settings_start = match.start(1)
settings_end = match.end(1)
(build_settings, ) = match.groups()
match = re.search(re.escape(setting_name)+' = ((?:.|\n)+?);', build_settings)
if not match:
# Add a brand new build setting. No checking for existing settings necessary.
settingtext = '\t\t\t\t'+setting_name+' = '+value+';\n'
project_data = project_data[:settings_start] + settingtext + project_data[settings_start:]
else:
# Build settings already exist. Is there one or many?
(search_paths,) = match.groups()
if re.search('\(\n', search_paths):
# Many
match = re.search(re.escape(value), search_paths)
if not match:
# If value has any spaces in it, Xcode will split it up into
# multiple entries.
escaped_value = re.escape(value).replace(' ', '",\n[ \t]+"')
match = re.search(escaped_value, search_paths)
if not match:
match = re.search(re.escape(setting_name)+' = \(\n', build_settings)
build_settings = build_settings[:match.end()] + '\t\t\t\t\t'+value+',\n' + build_settings[match.end():]
project_data = project_data[:settings_start] + build_settings + project_data[settings_end:]
else:
# One
if search_paths != value:
existing_path = search_paths
path_set = '(\n\t\t\t\t\t'+value+',\n\t\t\t\t\t'+existing_path+'\n\t\t\t\t)'
build_settings = build_settings[:match.start(1)] + path_set + build_settings[match.end(1):]
project_data = project_data[:settings_start] + build_settings + project_data[settings_end:]
self.set_project_data(project_data)
return True
def get_hash_base(self, uniquename):
examplehash = '320FFFEEEDDDCCCBBBAAA000'
uniquehash = hashlib.sha224(uniquename).hexdigest().upper()
uniquehash = uniquehash[:len(examplehash) - 4]
return '320'+uniquehash
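	# Note: the returned base is 3 prefix characters plus 20 hash characters,
	# i.e. 23 characters; callers append a single digit (tthash_base+'0',
	# +'1', ...) to produce the 24-character identifiers Xcode expects,
	# matching the length of examplehash above.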
def add_framework(self, framework):
tthash_base = self.get_hash_base(framework)
fileref_hash = self.add_filereference(framework, 'frameworks', tthash_base+'0', 'System/Library/Frameworks/'+framework, 'SDK_ROOT')
libfile_hash = self.add_buildfile(framework, fileref_hash, tthash_base+'1')
if not self.add_file_to_frameworks(framework, fileref_hash):
return False
if not self.add_file_to_frameworks_phase(framework, libfile_hash):
return False
return True
def add_bundle(self):
tthash_base = self.get_hash_base('Three20.bundle')
project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
build_path = os.path.join(Paths.src_dir, 'Three20.bundle')
rel_path = relpath(project_path, build_path)
fileref_hash = self.add_filereference('Three20.bundle', 'plug-in', tthash_base+'0', rel_path, 'SOURCE_ROOT')
libfile_hash = self.add_buildfile('Three20.bundle', fileref_hash, tthash_base+'1')
if not self.add_file_to_resources('Three20.bundle', fileref_hash):
return False
if not self.add_file_to_resources_phase('Three20.bundle', libfile_hash):
return False
return True
def add_dependency(self, dep):
project_data = self.get_project_data()
dep_data = dep.get_project_data()
if project_data is None or dep_data is None:
return False
logging.info("\nAdding "+str(dep)+" to "+str(self))
project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
dep_path = os.path.abspath(dep.xcodeprojpath())
rel_path = relpath(project_path, dep_path)
logging.info("")
logging.info("Project path: "+project_path)
logging.info("Dependency path: "+dep_path)
logging.info("Relative path: "+rel_path)
tthash_base = self.get_hash_base(dep.uniqueid())
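		# One hash base, seven derived GUIDs: suffix '0' is the file
		# reference, '1' the target dependency, '2' its container proxy,
		# '3' the product group, '4' the reference proxy, '5' the build
		# file and '6' the target product's container proxy (see the
		# numbered steps below).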
###############################################
logging.info("")
logging.info("Step 1: Add file reference to the dependency...")
self.set_project_data(project_data)
pbxfileref_hash = self.add_filereference(dep._project_name+'.xcodeproj', 'pb-project', tthash_base+'0', rel_path, 'SOURCE_ROOT')
project_data = self.get_project_data()
logging.info("Done: Added file reference: "+pbxfileref_hash)
###############################################
logging.info("")
logging.info("Step 2: Add file to Frameworks group...")
self.set_project_data(project_data)
if not self.add_file_to_frameworks(dep._project_name+".xcodeproj", pbxfileref_hash):
return False
project_data = self.get_project_data()
logging.info("Done: Added file to Frameworks group.")
###############################################
logging.info("")
logging.info("Step 3: Add dependencies...")
pbxtargetdependency_hash = None
pbxcontaineritemproxy_hash = None
match = re.search('\/\* Begin PBXTargetDependency section \*\/\n((?:.|\n)+?)\/\* End PBXTargetDependency section \*\/', project_data)
if not match:
logging.info("\tAdding a PBXTargetDependency section...")
match = re.search('\/\* End PBXSourcesBuildPhase section \*\/\n', project_data)
if not match:
logging.error("Couldn't find the PBXSourcesBuildPhase section.")
return False
project_data = project_data[:match.end()] + "\n/* Begin PBXTargetDependency section */\n\n/* End PBXTargetDependency section */\n" + project_data[match.end():]
else:
(subtext, ) = match.groups()
match = re.search('([A-Z0-9]+) \/\* PBXTargetDependency \*\/ = {\n[ \t]+isa = PBXTargetDependency;\n[ \t]+name = '+re.escape(dep._project_name)+';\n[ \t]+targetProxy = ([A-Z0-9]+) \/\* PBXContainerItemProxy \*\/;', project_data)
if match:
(pbxtargetdependency_hash, pbxcontaineritemproxy_hash,) = match.groups()
logging.info("This dependency already exists.")
if pbxtargetdependency_hash is None or pbxcontaineritemproxy_hash is None:
match = re.search('\/\* Begin PBXTargetDependency section \*\/\n', project_data)
pbxtargetdependency_hash = tthash_base+'1'
pbxcontaineritemproxy_hash = tthash_base+'2'
pbxtargetdependency = "\t\t"+pbxtargetdependency_hash+" /* PBXTargetDependency */ = {\n\t\t\tisa = PBXTargetDependency;\n\t\t\tname = "+dep._project_name+";\n\t\t\ttargetProxy = "+pbxcontaineritemproxy_hash+" /* PBXContainerItemProxy */;\n\t\t};\n"
project_data = project_data[:match.end()] + pbxtargetdependency + project_data[match.end():]
logging.info("Done: Added dependency.")
###############################################
logging.info("")
logging.info("Step 3.1: Add container proxy for dependencies...")
containerExists = False
match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n((?:.|\n)+?)\/\* End PBXContainerItemProxy section \*\/', project_data)
if not match:
logging.info("\tAdding a PBXContainerItemProxy section...")
match = re.search('\/\* End PBXBuildFile section \*\/\n', project_data)
if not match:
logging.error("Couldn't find the PBXBuildFile section.")
return False
project_data = project_data[:match.end()] + "\n/* Begin PBXContainerItemProxy section */\n\n/* End PBXContainerItemProxy section */\n" + project_data[match.end():]
else:
(subtext, ) = match.groups()
match = re.search(re.escape(pbxcontaineritemproxy_hash), subtext)
if match:
logging.info("This container proxy already exists.")
containerExists = True
if not containerExists:
match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n', project_data)
pbxcontaineritemproxy = "\t\t"+pbxcontaineritemproxy_hash+" /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = "+pbxfileref_hash+" /* "+dep._project_name+".xcodeproj */;\n\t\t\tproxyType = 1;\n\t\t\tremoteGlobalIDString = "+dep.guid()+";\n\t\t\tremoteInfo = "+dep._project_name+";\n\t\t};\n"
project_data = project_data[:match.end()] + pbxcontaineritemproxy + project_data[match.end():]
logging.info("Done: Added container proxy.")
###############################################
logging.info("")
logging.info("Step 3.2: Add module to the dependency list...")
match = re.search(self.guid()+' \/\* .+? \*\/ = {\n[ \t]+(?:.|\n)+?[ \t]+dependencies = \(\n((?:.|\n)+?)\);', project_data)
dependency_exists = False
if not match:
logging.error("Couldn't find the dependency list.")
return False
else:
(dependencylist, ) = match.groups()
match = re.search(re.escape(pbxtargetdependency_hash), dependencylist)
if match:
logging.info("This dependency has already been added.")
dependency_exists = True
if not dependency_exists:
match = re.search(self.guid()+' \/\* .+? \*\/ = {\n[ \t]+(?:.|\n)+?[ \t]+dependencies = \(\n', project_data)
if not match:
logging.error("Couldn't find the dependency list.")
return False
dependency_item = '\t\t\t\t'+pbxtargetdependency_hash+' /* PBXTargetDependency */,\n'
project_data = project_data[:match.end()] + dependency_item + project_data[match.end():]
logging.info("Done: Added module to the dependency list.")
###############################################
logging.info("")
logging.info("Step 4: Create project references...")
match = re.search('\/\* Begin PBXProject section \*\/\n((?:.|\n)+?)\/\* End PBXProject section \*\/', project_data)
if not match:
logging.error("Couldn't find the project section.")
return False
project_start = match.start(1)
project_end = match.end(1)
(project_section, ) = match.groups()
reference_exists = False
did_change = False
productgroup_hash = None
match = re.search('projectReferences = \(\n((?:.|\n)+?)\n[ \t]+\);', project_section)
if not match:
logging.info("Creating project references...")
match = re.search('projectDirPath = ".*?";\n', project_section)
if not match:
logging.error("Couldn't find project references anchor.")
return False
did_change = True
project_section = project_section[:match.end()] + '\t\t\tprojectReferences = (\n\t\t\t);\n' + project_section[match.end():]
else:
(refs, ) = match.groups()
match = re.search('\{\n[ \t]+ProductGroup = ([A-Z0-9]+) \/\* Products \*\/;\n[ \t]+ProjectRef = '+re.escape(pbxfileref_hash), refs)
if match:
(productgroup_hash, ) = match.groups()
logging.info("This product group already exists: "+productgroup_hash)
reference_exists = True
if not reference_exists:
match = re.search('projectReferences = \(\n', project_section)
if not match:
logging.error("Missing the project references item.")
return False
productgroup_hash = tthash_base+'3'
reference_text = '\t\t\t\t{\n\t\t\t\t\tProductGroup = '+productgroup_hash+' /* Products */;\n\t\t\t\t\tProjectRef = '+pbxfileref_hash+' /* '+dep._project_name+'.xcodeproj */;\n\t\t\t\t},\n'
project_section = project_section[:match.end()] + reference_text + project_section[match.end():]
did_change = True
if did_change:
project_data = project_data[:project_start] + project_section + project_data[project_end:]
logging.info("Done: Created project reference.")
###############################################
logging.info("")
logging.info("Step 4.1: Create product group...")
match = re.search('\/\* Begin PBXGroup section \*\/\n', project_data)
if not match:
logging.error("Couldn't find the group section.")
return False
group_start = match.end()
lib_hash = None
match = re.search(re.escape(productgroup_hash)+" \/\* Products \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n((?:.|\n)+?)\);", project_data)
if match:
logging.info("This product group already exists.")
(children, ) = match.groups()
match = re.search('([A-Z0-9]+) \/\* '+re.escape(dep._product_name)+' \*\/', children)
if not match:
				# TODO: Add this product.
				logging.error("No product found")
				return False
else:
(lib_hash, ) = match.groups()
else:
lib_hash = tthash_base+'4'
productgrouptext = "\t\t"+productgroup_hash+" /* Products */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t"+lib_hash+" /* "+dep._product_name+" */,\n\t\t\t);\n\t\t\tname = Products;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n"
project_data = project_data[:group_start] + productgrouptext + project_data[group_start:]
logging.info("Done: Created product group: "+lib_hash)
###############################################
logging.info("")
logging.info("Step 4.2: Add container proxy for target product...")
containerExists = False
targetproduct_hash = tthash_base+'6'
match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n((?:.|\n)+?)\/\* End PBXContainerItemProxy section \*\/', project_data)
if not match:
logging.info("\tAdding a PBXContainerItemProxy section...")
match = re.search('\/\* End PBXBuildFile section \*\/\n', project_data)
if not match:
logging.error("Couldn't find the PBXBuildFile section.")
return False
project_data = project_data[:match.end()] + "\n/* Begin PBXContainerItemProxy section */\n\n/* End PBXContainerItemProxy section */\n" + project_data[match.end():]
else:
(subtext, ) = match.groups()
match = re.search(re.escape(targetproduct_hash), subtext)
if match:
logging.info("This container proxy already exists.")
containerExists = True
if not containerExists:
match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n', project_data)
pbxcontaineritemproxy = "\t\t"+targetproduct_hash+" /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = "+pbxfileref_hash+" /* "+dep._project_name+".xcodeproj */;\n\t\t\tproxyType = 2;\n\t\t\tremoteGlobalIDString = "+dep._product_guid+";\n\t\t\tremoteInfo = "+dep._project_name+";\n\t\t};\n"
project_data = project_data[:match.end()] + pbxcontaineritemproxy + project_data[match.end():]
logging.info("Done: Added target container proxy.")
###############################################
logging.info("")
logging.info("Step 4.3: Create reference proxy...")
referenceExists = False
match = re.search('\/\* Begin PBXReferenceProxy section \*\/\n((?:.|\n)+?)\/\* End PBXReferenceProxy section \*\/', project_data)
if not match:
logging.info("\tAdding a PBXReferenceProxy section...")
match = re.search('\/\* End PBXProject section \*\/\n', project_data)
if not match:
logging.error("Couldn't find the PBXProject section.")
return False
project_data = project_data[:match.end()] + "\n/* Begin PBXReferenceProxy section */\n\n/* End PBXReferenceProxy section */\n" + project_data[match.end():]
else:
(subtext, ) = match.groups()
match = re.search(re.escape(lib_hash), subtext)
if match:
logging.info("This reference proxy already exists.")
referenceExists = True
if not referenceExists:
match = re.search('\/\* Begin PBXReferenceProxy section \*\/\n', project_data)
referenceproxytext = "\t\t"+lib_hash+" /* "+dep._product_name+" */ = {\n\t\t\tisa = PBXReferenceProxy;\n\t\t\tfileType = archive.ar;\n\t\t\tpath = \""+dep._product_name+"\";\n\t\t\tremoteRef = "+targetproduct_hash+" /* PBXContainerItemProxy */;\n\t\t\tsourceTree = BUILT_PRODUCTS_DIR;\n\t\t};\n"
project_data = project_data[:match.end()] + referenceproxytext + project_data[match.end():]
logging.info("Done: Created reference proxy.")
###############################################
logging.info("")
logging.info("Step 5: Add target file...")
self.set_project_data(project_data)
libfile_hash = self.add_buildfile(dep._product_name, lib_hash, tthash_base+'5')
project_data = self.get_project_data()
logging.info("Done: Added target file.")
###############################################
logging.info("")
logging.info("Step 6: Add frameworks...")
self.set_project_data(project_data)
self.add_file_to_frameworks_phase(dep._product_name, libfile_hash)
project_data = self.get_project_data()
		logging.info("Done: Added frameworks.")
self.set_project_data(project_data)
return True
|
CatsAndDogsbvba/odoo | refs/heads/8.0 | addons/purchase_double_validation/__openerp__.py | 260 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Double Validation on Purchases',
'version' : '1.1',
'category': 'Purchase Management',
'depends' : ['base','purchase'],
'author' : 'OpenERP SA',
'description': """
Double-validation for purchases exceeding minimum amount.
=========================================================
This module modifies the purchase workflow in order to validate purchases that
exceed the minimum amount set by the configuration wizard.
""",
'website': 'https://www.odoo.com/page/purchase',
'data': [
'purchase_double_validation_workflow.xml',
'purchase_double_validation_installer.xml',
'purchase_double_validation_view.xml',
],
'test': [
'test/purchase_double_validation_demo.yml',
'test/purchase_double_validation_test.yml'
],
'demo': [],
'installable': True,
'auto_install': False
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
gutomaia/nesasm_py | refs/heads/0.0.x | nesasm/tests/code_line_generator_test.py | 1 | # -*- coding: utf-8 -*-
from unittest import TestCase, skip
from lexical import code_line_generator
from types import GeneratorType
from tempfile import NamedTemporaryFile
class CodeLineGeneratorTest(TestCase):
@skip('TODO')
def test_unicode(self):
code = u'; Something\nCPY #$11'
gen = code_line_generator(code)
self.assertIsInstance(gen, GeneratorType)
self.assertEqual(u'; Something\n', next(gen))
self.assertEqual(u'CPY #$11', next(gen))
with self.assertRaises(StopIteration):
next(gen)
@skip('TODO')
def test_byte_string(self):
code = 'CPX #$0A\n; Another\n; idea\n'
gen = code_line_generator(code)
self.assertIsInstance(gen, GeneratorType)
self.assertEqual('CPX #$0A\n', next(gen))
self.assertEqual('; Another\n', next(gen))
self.assertEqual('; idea\n', next(gen))
with self.assertRaises(StopIteration):
next(gen)
@skip('TODO')
def test_real_file(self):
with NamedTemporaryFile(mode="r+") as f:
f.write("; this\nADC #$0A\n;test\n\n")
f.seek(0)
gen = code_line_generator(f)
self.assertEqual('; this\n', next(gen))
self.assertEqual('ADC #$0A\n', next(gen))
self.assertEqual(';test\n', next(gen))
self.assertEqual('\n', next(gen))
with self.assertRaises(StopIteration):
next(gen)
|
Pretio/boto | refs/heads/develop | boto/ec2/autoscale/request.py | 152 | # Copyright (c) 2009 Reza Lotun http://reza.lotun.name/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class Request(object):
def __init__(self, connection=None):
self.connection = connection
self.request_id = ''
def __repr__(self):
return 'Request:%s' % self.request_id
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'RequestId':
self.request_id = value
else:
setattr(self, name, value)
|
numenta/nupic.research | refs/heads/master | projects/continuous_learning/sigopt_config.py | 3 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
sigopt_config = dict(
name="GSC_duty_cycle_freezing",
project="continuous_learning",
observation_budget=200,
parallel_bandwidth=1,
parameters=[
dict(
name="cnn1_size",
type="int",
bounds=dict(min=64, max=256)
),
dict(
name="cnn2_size",
type="int",
bounds=dict(min=64, max=256)
),
dict(
name="cnn1_percent_on",
type="double",
bounds=dict(min=0.02, max=0.8)
),
dict(
name="cnn1_wt_sparsity",
type="double",
bounds=dict(min=0.05, max=0.8)
),
dict(
name="cnn2_percent_on",
type="double",
bounds=dict(min=0.02, max=0.8)
),
dict(
name="cnn2_wt_sparsity",
type="double",
bounds=dict(min=0.05, max=0.8)
),
dict(
name="dendrites_per_neuron",
type="int",
bounds=dict(min=1, max=6)
),
dict(
name="learning_rate",
type="double",
bounds=dict(min=0.001, max=0.2)
),
dict(
name="learning_rate_factor",
type="double",
bounds=dict(min=0, max=1)
),
dict(
name="use_batch_norm",
type="categorical",
categorical_values=["True", "False"]
),
dict(
name="log2_batch_size",
type="int",
bounds=dict(min=3, max=7)
),
dict(
name="linear1_n",
type="int",
bounds=dict(min=100, max=2500)
),
dict(
name="linear1_percent_on",
type="double",
bounds=dict(min=0.02, max=0.8)
),
dict(
name="linear1_weight_sparsity",
type="double",
bounds=dict(min=0.01, max=0.8)
),
dict(
name="linear2_percent_on",
type="double",
bounds=dict(min=0.02, max=0.8)
),
dict(
name="linear2_weight_sparsity",
type="double",
bounds=dict(min=0.01, max=0.8)
),
dict(
name="duty_cycle_period",
type="int",
bounds=dict(min=100, max=15000)
),
dict(
name="boost_strength",
type="double",
bounds=dict(min=0.0, max=2.0)
),
dict(
name="boost_strength_factor",
type="double",
bounds=dict(min=0.0, max=1.0)
),
],
metrics=[
dict(
name="area_under_curve",
objective="maximize"
)
]
)
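# Hedged usage sketch (not part of this repo): a dict shaped like this is
# typically unpacked straight into the SigOpt client's experiment-creation
# call; the client token below is a placeholder.
#
#   from sigopt import Connection
#   conn = Connection(client_token="YOUR_TOKEN")
#   experiment = conn.experiments().create(**sigopt_config)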
|
googleapis/googleapis-gen | refs/heads/master | google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/services/types/ad_group_extension_setting_service.py | 1 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v7.enums.types import response_content_type as gage_response_content_type
from google.ads.googleads.v7.resources.types import ad_group_extension_setting as gagr_ad_group_extension_setting
from google.protobuf import field_mask_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v7.services',
marshal='google.ads.googleads.v7',
manifest={
'GetAdGroupExtensionSettingRequest',
'MutateAdGroupExtensionSettingsRequest',
'AdGroupExtensionSettingOperation',
'MutateAdGroupExtensionSettingsResponse',
'MutateAdGroupExtensionSettingResult',
},
)
class GetAdGroupExtensionSettingRequest(proto.Message):
r"""Request message for
[AdGroupExtensionSettingService.GetAdGroupExtensionSetting][google.ads.googleads.v7.services.AdGroupExtensionSettingService.GetAdGroupExtensionSetting].
Attributes:
resource_name (str):
Required. The resource name of the ad group
extension setting to fetch.
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
class MutateAdGroupExtensionSettingsRequest(proto.Message):
r"""Request message for
[AdGroupExtensionSettingService.MutateAdGroupExtensionSettings][google.ads.googleads.v7.services.AdGroupExtensionSettingService.MutateAdGroupExtensionSettings].
Attributes:
customer_id (str):
Required. The ID of the customer whose ad
group extension settings are being modified.
operations (Sequence[google.ads.googleads.v7.services.types.AdGroupExtensionSettingOperation]):
Required. The list of operations to perform
on individual ad group extension settings.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
"""
customer_id = proto.Field(
proto.STRING,
number=1,
)
operations = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='AdGroupExtensionSettingOperation',
)
partial_failure = proto.Field(
proto.BOOL,
number=3,
)
validate_only = proto.Field(
proto.BOOL,
number=4,
)
class AdGroupExtensionSettingOperation(proto.Message):
r"""A single operation (create, update, remove) on an ad group
extension setting.
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
FieldMask that determines which resource
fields are modified in an update.
response_content_type (google.ads.googleads.v7.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
create (google.ads.googleads.v7.resources.types.AdGroupExtensionSetting):
Create operation: No resource name is
expected for the new ad group extension setting.
update (google.ads.googleads.v7.resources.types.AdGroupExtensionSetting):
Update operation: The ad group extension
setting is expected to have a valid resource
name.
remove (str):
Remove operation: A resource name for the removed ad group
extension setting is expected, in this format:
``customers/{customer_id}/adGroupExtensionSettings/{ad_group_id}~{extension_type}``
"""
update_mask = proto.Field(
proto.MESSAGE,
number=4,
message=field_mask_pb2.FieldMask,
)
response_content_type = proto.Field(
proto.ENUM,
number=5,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
create = proto.Field(
proto.MESSAGE,
number=1,
oneof='operation',
message=gagr_ad_group_extension_setting.AdGroupExtensionSetting,
)
update = proto.Field(
proto.MESSAGE,
number=2,
oneof='operation',
message=gagr_ad_group_extension_setting.AdGroupExtensionSetting,
)
remove = proto.Field(
proto.STRING,
number=3,
oneof='operation',
)
class MutateAdGroupExtensionSettingsResponse(proto.Message):
r"""Response message for an ad group extension setting mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results (Sequence[google.ads.googleads.v7.services.types.MutateAdGroupExtensionSettingResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE,
number=3,
message=status_pb2.Status,
)
results = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='MutateAdGroupExtensionSettingResult',
)
class MutateAdGroupExtensionSettingResult(proto.Message):
r"""The result for the ad group extension setting mutate.
Attributes:
resource_name (str):
Returned for successful operations.
ad_group_extension_setting (google.ads.googleads.v7.resources.types.AdGroupExtensionSetting):
The mutated AdGroupExtensionSetting with only mutable fields
after mutate. The field will only be returned when
response_content_type is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
ad_group_extension_setting = proto.Field(
proto.MESSAGE,
number=2,
message=gagr_ad_group_extension_setting.AdGroupExtensionSetting,
)
__all__ = tuple(sorted(__protobuf__.manifest))
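# Hedged usage sketch: proto-plus messages accept keyword arguments, so a
# remove operation can be built directly. The resource name is a made-up
# example following the format documented on AdGroupExtensionSettingOperation:
#
#   op = AdGroupExtensionSettingOperation(
#       remove='customers/123/adGroupExtensionSettings/456~SITELINK')
#   request = MutateAdGroupExtensionSettingsRequest(
#       customer_id='123', operations=[op])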
|
huguesv/PTVS | refs/heads/master | Python/Product/Miniconda/Miniconda3-x64/Lib/distutils/tests/test_cygwinccompiler.py | 25 | """Tests for distutils.cygwinccompiler."""
import unittest
import sys
import os
from io import BytesIO
from test.support import run_unittest
from distutils import cygwinccompiler
from distutils.cygwinccompiler import (check_config_h,
CONFIG_H_OK, CONFIG_H_NOTOK,
CONFIG_H_UNCERTAIN, get_versions,
get_msvcr)
from distutils.tests import support
class FakePopen(object):
test_class = None
def __init__(self, cmd, shell, stdout):
self.cmd = cmd.split()[0]
exes = self.test_class._exes
if self.cmd in exes:
# issue #6438 in Python 3.x, Popen returns bytes
self.stdout = BytesIO(exes[self.cmd])
else:
self.stdout = os.popen(cmd, 'r')
class CygwinCCompilerTestCase(support.TempdirManager,
unittest.TestCase):
def setUp(self):
super(CygwinCCompilerTestCase, self).setUp()
self.version = sys.version
self.python_h = os.path.join(self.mkdtemp(), 'python.h')
from distutils import sysconfig
self.old_get_config_h_filename = sysconfig.get_config_h_filename
sysconfig.get_config_h_filename = self._get_config_h_filename
self.old_find_executable = cygwinccompiler.find_executable
cygwinccompiler.find_executable = self._find_executable
self._exes = {}
self.old_popen = cygwinccompiler.Popen
FakePopen.test_class = self
cygwinccompiler.Popen = FakePopen
def tearDown(self):
sys.version = self.version
from distutils import sysconfig
sysconfig.get_config_h_filename = self.old_get_config_h_filename
cygwinccompiler.find_executable = self.old_find_executable
cygwinccompiler.Popen = self.old_popen
super(CygwinCCompilerTestCase, self).tearDown()
def _get_config_h_filename(self):
return self.python_h
def _find_executable(self, name):
if name in self._exes:
return name
return None
def test_check_config_h(self):
# check_config_h looks for "GCC" in sys.version first
# returns CONFIG_H_OK if found
sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC '
'4.0.1 (Apple Computer, Inc. build 5370)]')
self.assertEqual(check_config_h()[0], CONFIG_H_OK)
# then it tries to see if it can find "__GNUC__" in pyconfig.h
sys.version = 'something without the *CC word'
# if the file doesn't exist it returns CONFIG_H_UNCERTAIN
self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN)
# if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
self.write_file(self.python_h, 'xxx')
self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK)
# and CONFIG_H_OK if __GNUC__ is found
self.write_file(self.python_h, 'xxx __GNUC__ xxx')
self.assertEqual(check_config_h()[0], CONFIG_H_OK)
def test_get_versions(self):
# get_versions calls distutils.spawn.find_executable on
# 'gcc', 'ld' and 'dllwrap'
self.assertEqual(get_versions(), (None, None, None))
# Let's fake we have 'gcc' and it returns '3.4.5'
self._exes['gcc'] = b'gcc (GCC) 3.4.5 (mingw special)\nFSF'
res = get_versions()
self.assertEqual(str(res[0]), '3.4.5')
# and let's see what happens when the version
# doesn't match the regular expression
# (\d+\.\d+(\.\d+)*)
self._exes['gcc'] = b'very strange output'
res = get_versions()
self.assertEqual(res[0], None)
# same thing for ld
self._exes['ld'] = b'GNU ld version 2.17.50 20060824'
res = get_versions()
self.assertEqual(str(res[1]), '2.17.50')
self._exes['ld'] = b'@(#)PROGRAM:ld PROJECT:ld64-77'
res = get_versions()
self.assertEqual(res[1], None)
# and dllwrap
self._exes['dllwrap'] = b'GNU dllwrap 2.17.50 20060824\nFSF'
res = get_versions()
self.assertEqual(str(res[2]), '2.17.50')
self._exes['dllwrap'] = b'Cheese Wrap'
res = get_versions()
self.assertEqual(res[2], None)
def test_get_msvcr(self):
# none
sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) '
'\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]')
self.assertEqual(get_msvcr(), None)
# MSVC 7.0
sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
'[MSC v.1300 32 bits (Intel)]')
self.assertEqual(get_msvcr(), ['msvcr70'])
# MSVC 7.1
sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
'[MSC v.1310 32 bits (Intel)]')
self.assertEqual(get_msvcr(), ['msvcr71'])
# VS2005 / MSVC 8.0
sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
'[MSC v.1400 32 bits (Intel)]')
self.assertEqual(get_msvcr(), ['msvcr80'])
# VS2008 / MSVC 9.0
sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
'[MSC v.1500 32 bits (Intel)]')
self.assertEqual(get_msvcr(), ['msvcr90'])
# unknown
sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
'[MSC v.1999 32 bits (Intel)]')
self.assertRaises(ValueError, get_msvcr)
def test_suite():
return unittest.makeSuite(CygwinCCompilerTestCase)
if __name__ == '__main__':
run_unittest(test_suite())
|
HomeRad/TorCleaner | refs/heads/master | tests/http/test_date.py | 1 | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2005-2010 Bastian Kleineidam
import unittest
import time
import wc.http.date
class TestDate(unittest.TestCase):
def test_rfc1123(self):
now = time.time()
wc.http.date.get_date_rfc1123(now)
s = "Sat, 12 Feb 0005 11:12:13 GMT"
        t = (5, 2, 12, 11, 12, 13, 5, 43, 0)  # year "0005"
self.assertEqual(wc.http.date.parse_date_rfc1123(s), t)
s = "Sat, 01 Nov 2099 01:02:03 GMT"
t = (2099, 11, 1, 1, 2, 3, 5, 305, 0)
self.assertEqual(wc.http.date.parse_date_rfc1123(s), t)
s = "Tue, 99 Feb 2000 12:13:14 GMT"
self.assertRaises(ValueError, wc.http.date.parse_date_rfc1123, s)
def test_rfc850(self):
now = time.time()
wc.http.date.get_date_rfc850(now)
s = "Saturday, 12-Feb-09 11:12:13 GMT"
t = (2009, 2, 12, 11, 12, 13, 5, 43, 0)
self.assertEqual(wc.http.date.parse_date_rfc850(s), t)
s = "Saturday, 01-Nov-05 01:02:03 GMT"
t = (2005, 11, 1, 1, 2, 3, 5, 305, 0)
self.assertEqual(wc.http.date.parse_date_rfc850(s), t)
s = "Tuesday, 99-Feb-98 12:13:14 GMT"
self.assertRaises(ValueError, wc.http.date.parse_date_rfc850, s)
def test_asctime(self):
now = time.time()
wc.http.date.get_date_asctime(now)
s = "Sat Feb 12 11:12:13 2005"
t = (2005, 2, 12, 11, 12, 13, 5, 43, 0)
self.assertEqual(wc.http.date.parse_date_asctime(s), t)
s = "Sat Nov 1 01:02:03 2099"
t = (2099, 11, 1, 1, 2, 3, 5, 305, 0)
self.assertEqual(wc.http.date.parse_date_asctime(s), t)
s = "Tue Feb 99 12:13:14 2000"
self.assertRaises(ValueError, wc.http.date.parse_date_asctime, s)
|
ngi644/pymimamori | refs/heads/master | setup.py | 1 | # encoding: utf-8
"""
Created by nagai at 15/04/21
"""
__author__ = 'nagai'
from setuptools import setup, find_packages
setup(
name='mimamori',
description='Mimamori',
author='Takashi Nagai',
author_email='[email protected]',
url='',
version='0.1.0',
license='AGPL-3.0',
keywords=['ble',],
packages=find_packages(),
include_package_data=True,
install_requires=[
'gattlib',
'datadog',
'docopt'
],
classifiers=[
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Education',
'Topic :: Internet :: WWW/HTTP',
],
)
|
huanghao/mic | refs/heads/master | mic/utils/grabber.py | 6 | #!/usr/bin/python
import os
import sys
import fcntl
import struct
import termios
from mic import msger
from mic.utils import runner
from mic.utils.errors import CreatorError
from mic.utils.safeurl import SafeURL
from urlgrabber import grabber
from urlgrabber import __version__ as grabber_version
def myurlgrab(url, filename, proxies, progress_obj = None):
g = grabber.URLGrabber()
if progress_obj is None:
progress_obj = TextProgress()
if url.startswith("file:/"):
filepath = "/%s" % url.replace("file:", "").lstrip('/')
if not os.path.exists(filepath):
raise CreatorError("URLGrabber error: can't find file %s" % url)
if url.endswith('.rpm'):
return filepath
else:
# untouch repometadata in source path
runner.show(['cp', '-f', filepath, filename])
else:
try:
# cast url to str here, sometimes it can be unicode,
# but pycurl only accept str
filename = g.urlgrab(url=str(url),
filename=filename,
ssl_verify_host=False,
ssl_verify_peer=False,
proxies=proxies,
http_headers=(('Pragma', 'no-cache'),),
quote=0,
progress_obj=progress_obj)
except grabber.URLGrabError, err:
tmp = SafeURL(url)
msg = str(err)
if msg.find(url) < 0:
msg += ' on %s' % tmp
else:
msg = msg.replace(url, tmp)
raise CreatorError(msg)
return filename
def terminal_width(fd=1):
""" Get the real terminal width """
try:
buf = 'abcdefgh'
buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf)
return struct.unpack('hhhh', buf)[1]
    except IOError:  # e.g. fd is not a tty
return 80
def truncate_url(url, width):
return os.path.basename(url)[0:width]
class TextProgress(object):
# make the class as singleton
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
            cls._instance = super(TextProgress, cls).__new__(cls)
return cls._instance
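    # __new__ always hands back the same instance, so every download shares
    # one progress object and its counter carries over between files.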
def __init__(self, totalnum = None):
self.total = totalnum
self.counter = 1
def start(self, filename, url, *args, **kwargs):
self.url = url
self.termwidth = terminal_width()
if self.total is None:
msger.info("Retrieving %s ..." % truncate_url(self.url, self.termwidth - 15))
else:
msger.info("Retrieving %s [%d/%d] ..." % (truncate_url(self.url, self.termwidth - 25), self.counter, self.total))
def update(self, *args):
pass
def end(self, *args):
if self.counter == self.total:
msger.raw("\n")
if self.total is not None:
self.counter += 1
|
kylewalters18/bcycle_api | refs/heads/master | tests/test_api.py | 1 | import unittest
import json
from collections import namedtuple
from datetime import datetime
from unittest import mock
from bcycle import app
MockTrip = namedtuple('trip', ['id', 'bike_id', 'duration', 'checkout_kiosk', 'checkout_datetime',
'return_kiosk', 'return_datetime'])
MockRider = namedtuple('rider', ['id', 'program', 'zip_code', 'membership_type', 'trips'])
DictContainer = namedtuple('container', 'to_dict')
ItemContainer = namedtuple('item_container', 'items')
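# DictContainer stands in for a model instance (its to_dict field replaces the
# real to_dict method), while ItemContainer mimics the object returned by
# query.paginate(), which exposes the current page's rows as .items.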
class ApiTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def test_404_not_found(self):
rv = self.app.get('/this_route_doesnt_exists')
json_rv = json.loads(rv.data.decode('UTF-8'))
self.assertEqual(json_rv['status'], 404)
self.assertEqual(json_rv['error'], 'not found')
self.assertEqual(json_rv['message'], 'invalid resource URI')
class TripTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def _verify_trip(self, actual, expected):
self.assertEqual(actual['id'], expected.id)
self.assertEqual(actual['bike_id'], expected.bike_id)
self.assertEqual(actual['duration'], expected.duration)
self.assertEqual(actual['checkout_kiosk'], expected.checkout_kiosk)
self.assertEqual(actual['checkout_datetime'], expected.checkout_datetime)
self.assertEqual(actual['return_kiosk'], expected.return_kiosk)
self.assertEqual(actual['return_datetime'], expected.return_datetime)
@mock.patch('bcycle.v1.endpoints.Trip')
def test_get_trips_endpoint(self, mock_trip):
test_time = datetime.now().isoformat()
trip = MockTrip(0, 1, 30, 'Main Street', test_time, '1st Ave', test_time)
mock_trip.query.paginate.return_value = ItemContainer(
[DictContainer(lambda: trip._asdict())]
)
mock_trip.query.count.return_value = 1
rv = self.app.get('/v1/trip')
response_data = json.loads(rv.data.decode('UTF-8'))
self.assertEqual(len(response_data['trips']), 1)
response_trip = response_data['trips'][0]
self._verify_trip(response_trip, trip)
@mock.patch('bcycle.v1.endpoints.Trip')
def test_no_such_trip_endpoint(self, mock_trip):
mock_trip.query.get.return_value = None
rv = self.app.get('/v1/trip/0')
response_data = json.loads(rv.data.decode('UTF-8'))
self.assertEqual(response_data['error'], 'resource does not exist')
@mock.patch('bcycle.v1.endpoints.Trip')
def test_trip_endpoint(self, mock_trip):
test_time = datetime.now().isoformat()
trip = MockTrip(0, 1, 30, 'Main Street', test_time, '1st Ave', test_time)
mock_trip.query.get.return_value = DictContainer(lambda: trip._asdict())
rv = self.app.get('/v1/trip/0')
response_trip = json.loads(rv.data.decode('UTF-8'))
self._verify_trip(response_trip, trip)
class RiderTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def _verify_rider(self, actual, expected):
self.assertEqual(actual['id'], expected.id)
self.assertEqual(actual['program'], expected.program)
self.assertEqual(actual['zip_code'], expected.zip_code)
self.assertEqual(actual['membership_type'], expected.membership_type)
self.assertEqual(actual['trips'], expected.trips)
@mock.patch('bcycle.v1.endpoints.Rider')
def test_get_riders(self, mock_rider):
rider = MockRider(0, 'Denver B Cycle', 80202, 'annual', [])
mock_rider.query.paginate.return_value = ItemContainer(
[DictContainer(lambda: rider._asdict())]
)
mock_rider.query.count.return_value = 1
rv = self.app.get('/v1/rider')
response_data = json.loads(rv.data.decode('UTF-8'))
self.assertEqual(len(response_data['riders']), 1)
response_rider = response_data['riders'][0]
self._verify_rider(response_rider, rider)
@mock.patch('bcycle.v1.endpoints.Rider')
    def test_no_such_rider_endpoint(self, mock_rider):
mock_rider.query.get.return_value = None
rv = self.app.get('/v1/rider/0')
response_data = json.loads(rv.data.decode('UTF-8'))
self.assertEqual(response_data['error'], 'resource does not exist')
@mock.patch('bcycle.v1.endpoints.Rider')
    def test_rider_endpoint(self, mock_rider):
rider = MockRider(0, 'Denver B Cycle', 80202, 'annual', [])
mock_rider.query.get.return_value = DictContainer(lambda: rider._asdict())
rv = self.app.get('/v1/rider/0')
response_data = json.loads(rv.data.decode('UTF-8'))
self._verify_rider(response_data, rider)
|
olapaola/olapaola-android-scripting | refs/heads/master | python-build/python-libs/gdata/src/atom/http_interface.py | 133 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module provides a common interface for all HTTP requests.
HttpResponse: Represents the server's response to an HTTP request. Provides
an interface identical to httplib.HTTPResponse which is the response
expected from higher level classes which use HttpClient.request.
GenericHttpClient: Provides an interface (superclass) for an object
responsible for making HTTP requests. Subclasses of this object are
used in AtomService and GDataService to make requests to the server. By
changing the http_client member object, the AtomService is able to make
HTTP requests using different logic (for example, when running on
Google App Engine, the http_client makes requests using the App Engine
urlfetch API).
"""
__author__ = 'api.jscudder (Jeff Scudder)'
import StringIO
USER_AGENT = '%s GData-Python/1.3.3'
class Error(Exception):
pass
class UnparsableUrlObject(Error):
pass
class ContentLengthRequired(Error):
pass
class HttpResponse(object):
def __init__(self, body=None, status=None, reason=None, headers=None):
"""Constructor for an HttpResponse object.
HttpResponse represents the server's response to an HTTP request from
the client. The HttpClient.request method returns a httplib.HTTPResponse
object and this HttpResponse class is designed to mirror the interface
exposed by httplib.HTTPResponse.
Args:
body: A file like object, with a read() method. The body could also
be a string, and the constructor will wrap it so that
HttpResponse.read(self) will return the full string.
status: The HTTP status code as an int. Example: 200, 201, 404.
reason: The HTTP status message which follows the code. Example:
OK, Created, Not Found
headers: A dictionary containing the HTTP headers in the server's
response. A common header in the response is Content-Length.
"""
if body:
if hasattr(body, 'read'):
self._body = body
else:
self._body = StringIO.StringIO(body)
else:
self._body = None
if status is not None:
self.status = int(status)
else:
self.status = None
self.reason = reason
self._headers = headers or {}
def getheader(self, name, default=None):
if name in self._headers:
return self._headers[name]
else:
return default
def read(self, amt=None):
if not amt:
return self._body.read()
else:
return self._body.read(amt)
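# Illustrative only: HttpResponse wraps either a string or a file-like body,
# so both behave the same to callers:
#
#   response = HttpResponse(body='hello', status=200, reason='OK',
#                           headers={'Content-Length': '5'})
#   response.read(2)                       # returns 'he'
#   response.getheader('Content-Length')   # returns '5'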
class GenericHttpClient(object):
debug = False
def __init__(self, http_client, headers=None):
"""
Args:
http_client: An object which provides a request method to make an HTTP
request. The request method in GenericHttpClient performs a
call-through to the contained HTTP client object.
headers: A dictionary containing HTTP headers which should be included
in every HTTP request. Common persistent headers include
'User-Agent'.
"""
self.http_client = http_client
self.headers = headers or {}
def request(self, operation, url, data=None, headers=None):
all_headers = self.headers.copy()
if headers:
all_headers.update(headers)
return self.http_client.request(operation, url, data=data,
headers=all_headers)
def get(self, url, headers=None):
return self.request('GET', url, headers=headers)
def post(self, url, data, headers=None):
return self.request('POST', url, data=data, headers=headers)
def put(self, url, data, headers=None):
return self.request('PUT', url, data=data, headers=headers)
def delete(self, url, headers=None):
return self.request('DELETE', url, headers=headers)
class GenericToken(object):
"""Represents an Authorization token to be added to HTTP requests.
Some Authorization headers included calculated fields (digital
signatures for example) which are based on the parameters of the HTTP
request. Therefore the token is responsible for signing the request
and adding the Authorization header.
"""
def perform_request(self, http_client, operation, url, data=None,
headers=None):
"""For the GenericToken, no Authorization token is set."""
return http_client.request(operation, url, data=data, headers=headers)
def valid_for_scope(self, url):
"""Tells the caller if the token authorizes access to the desired URL.
Since the generic token doesn't add an auth header, it is not valid for
any scope.
"""
return False
|
mancoast/CPythonPyc_test | refs/heads/master | fail/330_test_poplib.py | 7 | """Test script for poplib module."""
# Modified by Giampaolo Rodola' to give poplib.POP3 and poplib.POP3_SSL
# a real test suite
import poplib
import asyncore
import asynchat
import socket
import os
import time
import errno
from unittest import TestCase
from test import support as test_support
threading = test_support.import_module('threading')
HOST = test_support.HOST
PORT = 0
# the dummy data returned by server when LIST and RETR commands are issued
LIST_RESP = b'1 1\r\n2 2\r\n3 3\r\n4 4\r\n5 5\r\n.\r\n'
RETR_RESP = b"""From: [email protected]\
\r\nContent-Type: text/plain\r\n\
MIME-Version: 1.0\r\n\
Subject: Dummy\r\n\
\r\n\
line1\r\n\
line2\r\n\
line3\r\n\
.\r\n"""
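# Both canned responses end with the POP3 multi-line terminator ".\r\n", so a
# real poplib client parses them exactly as it would live server output.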
class DummyPOP3Handler(asynchat.async_chat):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
def collect_incoming_data(self, data):
self.in_buffer.append(data)
def found_terminator(self):
line = b''.join(self.in_buffer)
line = str(line, 'ISO-8859-1')
self.in_buffer = []
cmd = line.split(' ')[0].lower()
space = line.find(' ')
if space != -1:
arg = line[space + 1:]
else:
arg = ""
if hasattr(self, 'cmd_' + cmd):
method = getattr(self, 'cmd_' + cmd)
method(arg)
else:
self.push('-ERR unrecognized POP3 command "%s".' %cmd)
def handle_error(self):
raise
def push(self, data):
asynchat.async_chat.push(self, data.encode("ISO-8859-1") + b'\r\n')
def cmd_echo(self, arg):
# sends back the received string (used by the test suite)
self.push(arg)
def cmd_user(self, arg):
if arg != "guido":
self.push("-ERR no such user")
self.push('+OK password required')
def cmd_pass(self, arg):
if arg != "python":
self.push("-ERR wrong password")
self.push('+OK 10 messages')
def cmd_stat(self, arg):
self.push('+OK 10 100')
def cmd_list(self, arg):
if arg:
self.push('+OK %s %s' %(arg, arg))
else:
self.push('+OK')
asynchat.async_chat.push(self, LIST_RESP)
cmd_uidl = cmd_list
def cmd_retr(self, arg):
self.push('+OK %s bytes' %len(RETR_RESP))
asynchat.async_chat.push(self, RETR_RESP)
cmd_top = cmd_retr
def cmd_dele(self, arg):
self.push('+OK message marked for deletion.')
def cmd_noop(self, arg):
self.push('+OK done nothing.')
def cmd_rpop(self, arg):
self.push('+OK done nothing.')
def cmd_apop(self, arg):
self.push('+OK done nothing.')
def cmd_quit(self, arg):
self.push('+OK closing.')
self.close_when_done()
class DummyPOP3Server(asyncore.dispatcher, threading.Thread):
handler = DummyPOP3Handler
def __init__(self, address, af=socket.AF_INET):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(af, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.active = False
self.active_lock = threading.Lock()
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
self.active_lock.acquire()
asyncore.loop(timeout=0.1, count=1)
self.active_lock.release()
asyncore.close_all(ignore_all=True)
def stop(self):
assert self.active
self.active = False
self.join()
def handle_accepted(self, conn, addr):
self.handler_instance = self.handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
class TestPOP3Class(TestCase):
def assertOK(self, resp):
self.assertTrue(resp.startswith(b"+OK"))
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.start()
self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
def tearDown(self):
self.client.close()
self.server.stop()
def test_getwelcome(self):
self.assertEqual(self.client.getwelcome(),
b'+OK dummy pop3 server ready. <timestamp>')
def test_exceptions(self):
self.assertRaises(poplib.error_proto, self.client._shortcmd, 'echo -err')
def test_user(self):
self.assertOK(self.client.user('guido'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_pass_(self):
self.assertOK(self.client.pass_('python'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_stat(self):
self.assertEqual(self.client.stat(), (10, 100))
def test_list(self):
self.assertEqual(self.client.list()[1:],
([b'1 1', b'2 2', b'3 3', b'4 4', b'5 5'],
25))
self.assertTrue(self.client.list('1').endswith(b"OK 1 1"))
def test_retr(self):
expected = (b'+OK 116 bytes',
[b'From: [email protected]', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy',
b'', b'line1', b'line2', b'line3'],
113)
foo = self.client.retr('foo')
self.assertEqual(foo, expected)
def test_dele(self):
self.assertOK(self.client.dele('foo'))
def test_noop(self):
self.assertOK(self.client.noop())
def test_rpop(self):
self.assertOK(self.client.rpop('foo'))
def test_apop(self):
self.assertOK(self.client.apop('foo', 'dummypassword'))
def test_top(self):
expected = (b'+OK 116 bytes',
[b'From: [email protected]', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy', b'',
b'line1', b'line2', b'line3'],
113)
self.assertEqual(self.client.top(1, 1), expected)
def test_uidl(self):
self.client.uidl()
self.client.uidl('foo')
def test_quit(self):
resp = self.client.quit()
self.assertTrue(resp)
self.assertIsNone(self.client.sock)
self.assertIsNone(self.client.file)
SUPPORTS_SSL = False
if hasattr(poplib, 'POP3_SSL'):
import ssl
SUPPORTS_SSL = True
CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem")
class DummyPOP3_SSLHandler(DummyPOP3Handler):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
ssl_socket = ssl.wrap_socket(self.socket, certfile=CERTFILE,
server_side=True,
do_handshake_on_connect=False)
self.del_channel()
self.set_socket(ssl_socket)
# Must try handshake before calling push()
self._ssl_accepting = True
self._do_ssl_handshake()
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
def _do_ssl_handshake(self):
try:
self.socket.do_handshake()
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
except socket.error as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
self._ssl_accepting = False
def handle_read(self):
if self._ssl_accepting:
self._do_ssl_handshake()
else:
DummyPOP3Handler.handle_read(self)
class TestPOP3_SSLClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3_SSL
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.handler = DummyPOP3_SSLHandler
self.server.start()
self.client = poplib.POP3_SSL(self.server.host, self.server.port)
def test__all__(self):
self.assertIn('POP3_SSL', poplib.__all__)
def test_context(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, certfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE,
certfile=CERTFILE, context=ctx)
self.client.quit()
self.client = poplib.POP3_SSL(self.server.host, self.server.port,
context=ctx)
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
self.assertIs(self.client.sock.context, ctx)
self.assertTrue(self.client.noop().startswith(b'+OK'))
class TestTimeouts(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(60) # Safety net. Look issue 11812
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=self.server, args=(self.evt,self.sock))
self.thread.setDaemon(True)
self.thread.start()
self.evt.wait()
def tearDown(self):
self.thread.join()
del self.thread # Clear out any dangling Thread objects.
def server(self, evt, serv):
serv.listen(5)
evt.set()
try:
conn, addr = serv.accept()
conn.send(b"+ Hola mundo\n")
conn.close()
except socket.timeout:
pass
finally:
serv.close()
def testTimeoutDefault(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3(HOST, self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def testTimeoutNone(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(pop.sock.gettimeout() is None)
pop.sock.close()
def testTimeoutValue(self):
pop = poplib.POP3(HOST, self.port, timeout=30)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def test_main():
tests = [TestPOP3Class, TestTimeouts]
if SUPPORTS_SSL:
tests.append(TestPOP3_SSLClass)
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
finally:
test_support.threading_cleanup(*thread_info)
if __name__ == '__main__':
test_main()
|
aptivate/djangocms_events | refs/heads/master | tests/events/models.py | 1 | from __future__ import absolute_import, unicode_literals
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext as _
from cms.models.pluginmodel import CMSPlugin
class Wossname(models.Model):
name = models.CharField(max_length=255)
owner = models.ForeignKey(ContentType)
def __unicode__(self):
return unicode(self.name)
class Event(models.Model):
title = models.CharField(max_length=255)
description = models.TextField()
additional_information = models.TextField(null=True, blank=True)
event_type = models.ForeignKey(Wossname, null=True)
location = models.CharField(max_length=255, null=True, blank=True)
attendees = models.CharField(max_length=512, null=True, blank=True)
event_url = models.URLField(null=True, blank=True)
contact_name = models.CharField(max_length=255, null=True, blank=True)
contact_email = models.EmailField(null=True, blank=True)
contact_phone = models.CharField(max_length=255, null=True, blank=True)
# tags = TagAutocompleteField(null=True, blank=True)
start_date = models.DateField()
end_date = models.DateField(null=True, blank=True)
def __unicode__(self):
return unicode(self.title)
def get_absolute_url(self):
return reverse('event_detail', args=[self.id])
|
DeMille/emailhooks | refs/heads/master | django_nonrel/django/contrib/localflavor/ca/forms.py | 100 | """
Canada-specific Form helpers
"""
from __future__ import absolute_import, unicode_literals
import re
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, CharField, Select
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
phone_digits_re = re.compile(r'^(?:1-?)?(\d{3})[-\.]?(\d{3})[-\.]?(\d{4})$')
sin_re = re.compile(r"^(\d{3})-(\d{3})-(\d{3})$")
class CAPostalCodeField(CharField):
"""
Canadian postal code field.
    Validates against known invalid characters: D, F, I, O, Q, U.
    Additionally, the first character cannot be Z or W.
For more info see:
http://www.canadapost.ca/tools/pg/manual/PGaddress-e.asp#1402170
"""
default_error_messages = {
'invalid': _('Enter a postal code in the format XXX XXX.'),
}
postcode_regex = re.compile(r'^([ABCEGHJKLMNPRSTVXY]\d[ABCEGHJKLMNPRSTVWXYZ]) *(\d[ABCEGHJKLMNPRSTVWXYZ]\d)$')
def clean(self, value):
value = super(CAPostalCodeField, self).clean(value)
if value in EMPTY_VALUES:
return ''
postcode = value.upper().strip()
m = self.postcode_regex.match(postcode)
if not m:
raise ValidationError(self.default_error_messages['invalid'])
return "%s %s" % (m.group(1), m.group(2))
class CAPhoneNumberField(Field):
"""Canadian phone number field."""
default_error_messages = {
'invalid': _('Phone numbers must be in XXX-XXX-XXXX format.'),
}
def clean(self, value):
"""Validate a phone number.
"""
super(CAPhoneNumberField, self).clean(value)
if value in EMPTY_VALUES:
return ''
        value = re.sub(r'(\(|\)|\s+)', '', smart_text(value))
m = phone_digits_re.search(value)
if m:
return '%s-%s-%s' % (m.group(1), m.group(2), m.group(3))
raise ValidationError(self.error_messages['invalid'])
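# Usage sketch (illustrative): parentheses and whitespace are stripped before
# matching, so common notations normalize to one canonical form:
#
#     field = CAPhoneNumberField()
#     field.clean('(514) 555-1234')   # -> '514-555-1234'
#     field.clean('1-514-555-1234')   # -> '514-555-1234' (leading 1 dropped)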
class CAProvinceField(Field):
"""
A form field that validates its input is a Canadian province name or abbreviation.
    It normalizes the input to the standard two-letter postal service
abbreviation for the given province.
"""
default_error_messages = {
'invalid': _('Enter a Canadian province or territory.'),
}
def clean(self, value):
super(CAProvinceField, self).clean(value)
if value in EMPTY_VALUES:
return ''
try:
value = value.strip().lower()
except AttributeError:
pass
else:
# Load data in memory only when it is required, see also #17275
from .ca_provinces import PROVINCES_NORMALIZED
try:
                return PROVINCES_NORMALIZED[value]
except KeyError:
pass
raise ValidationError(self.error_messages['invalid'])
class CAProvinceSelect(Select):
"""
A Select widget that uses a list of Canadian provinces and
territories as its choices.
"""
def __init__(self, attrs=None):
# Load data in memory only when it is required, see also #17275
from .ca_provinces import PROVINCE_CHOICES
super(CAProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class CASocialInsuranceNumberField(Field):
"""
A Canadian Social Insurance Number (SIN).
Checks the following rules to determine whether the number is valid:
* Conforms to the XXX-XXX-XXX format.
* Passes the check digit process "Luhn Algorithm"
See: http://en.wikipedia.org/wiki/Social_Insurance_Number
"""
default_error_messages = {
'invalid': _('Enter a valid Canadian Social Insurance number in XXX-XXX-XXX format.'),
}
def clean(self, value):
super(CASocialInsuranceNumberField, self).clean(value)
if value in EMPTY_VALUES:
return ''
match = re.match(sin_re, value)
if not match:
raise ValidationError(self.error_messages['invalid'])
number = '%s-%s-%s' % (match.group(1), match.group(2), match.group(3))
check_number = '%s%s%s' % (match.group(1), match.group(2), match.group(3))
if not self.luhn_checksum_is_valid(check_number):
raise ValidationError(self.error_messages['invalid'])
return number
def luhn_checksum_is_valid(self, number):
"""
Checks to make sure that the SIN passes a luhn mod-10 checksum
See: http://en.wikipedia.org/wiki/Luhn_algorithm
"""
sum = 0
num_digits = len(number)
oddeven = num_digits & 1
for count in range(0, num_digits):
digit = int(number[count])
if not (( count & 1 ) ^ oddeven ):
digit = digit * 2
if digit > 9:
digit = digit - 9
sum = sum + digit
return ( (sum % 10) == 0 )
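# Worked example of the checksum above, using SIN 046-454-286 (the sample
# number on the Wikipedia page cited in the class docstring):
#   digits:                      0  4  6  4  5  4  2  8  6
#   every second digit doubled:  4->8, 4->8, 4->8, 8->16->1+6=7
#   sum = 0+8+6+8+5+8+2+7+6 = 50, and 50 % 10 == 0, so the number is valid.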
|
ProfessionalIT/maxigenios-website | refs/heads/master | sdk/google_appengine/lib/django-0.96/django/db/backends/mysql/introspection.py | 32 | from django.db.backends.mysql.base import quote_name
from MySQLdb import ProgrammingError, OperationalError
from MySQLdb.constants import FIELD_TYPE
import re
foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")
def get_table_list(cursor):
"Returns a list of table names in the current database."
cursor.execute("SHOW TABLES")
return [row[0] for row in cursor.fetchall()]
def get_table_description(cursor, table_name):
"Returns a description of the table, with the DB-API cursor.description interface."
cursor.execute("SELECT * FROM %s LIMIT 1" % quote_name(table_name))
return cursor.description
def _name_to_index(cursor, table_name):
"""
Returns a dictionary of {field_name: field_index} for the given table.
Indexes are 0-based.
"""
return dict([(d[0], i) for i, d in enumerate(get_table_description(cursor, table_name))])
def get_relations(cursor, table_name):
"""
Returns a dictionary of {field_index: (field_index_other_table, other_table)}
representing all relationships to the given table. Indexes are 0-based.
"""
my_field_dict = _name_to_index(cursor, table_name)
constraints = []
relations = {}
try:
# This should work for MySQL 5.0.
cursor.execute("""
SELECT column_name, referenced_table_name, referenced_column_name
FROM information_schema.key_column_usage
WHERE table_name = %s
AND table_schema = DATABASE()
AND referenced_table_name IS NOT NULL
AND referenced_column_name IS NOT NULL""", [table_name])
constraints.extend(cursor.fetchall())
except (ProgrammingError, OperationalError):
        # Fall back to "SHOW CREATE TABLE" for older MySQL versions.
        # Go through all constraints and save the matched groups.
cursor.execute("SHOW CREATE TABLE %s" % quote_name(table_name))
for row in cursor.fetchall():
pos = 0
while True:
match = foreign_key_re.search(row[1], pos)
                if match is None:
break
pos = match.end()
constraints.append(match.groups())
for my_fieldname, other_table, other_field in constraints:
other_field_index = _name_to_index(cursor, other_table)[other_field]
my_field_index = my_field_dict[my_fieldname]
relations[my_field_index] = (other_field_index, other_table)
return relations
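# Illustrative return value (table and column names assumed): if column 3 of
# the inspected table references column 0 of "auth_user", this returns
#     {3: (0, 'auth_user')}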
def get_indexes(cursor, table_name):
"""
Returns a dictionary of fieldname -> infodict for the given table,
where each infodict is in the format:
{'primary_key': boolean representing whether it's the primary key,
'unique': boolean representing whether it's a unique index}
"""
cursor.execute("SHOW INDEX FROM %s" % quote_name(table_name))
indexes = {}
for row in cursor.fetchall():
indexes[row[4]] = {'primary_key': (row[2] == 'PRIMARY'), 'unique': not bool(row[1])}
return indexes
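# Illustrative return value (column names assumed): for a table with an "id"
# primary key and a unique "slug" column, this returns
#     {'id': {'primary_key': True, 'unique': True},
#      'slug': {'primary_key': False, 'unique': True}}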
DATA_TYPES_REVERSE = {
FIELD_TYPE.BLOB: 'TextField',
FIELD_TYPE.CHAR: 'CharField',
FIELD_TYPE.DECIMAL: 'FloatField',
FIELD_TYPE.DATE: 'DateField',
FIELD_TYPE.DATETIME: 'DateTimeField',
FIELD_TYPE.DOUBLE: 'FloatField',
FIELD_TYPE.FLOAT: 'FloatField',
FIELD_TYPE.INT24: 'IntegerField',
FIELD_TYPE.LONG: 'IntegerField',
FIELD_TYPE.LONGLONG: 'IntegerField',
FIELD_TYPE.SHORT: 'IntegerField',
FIELD_TYPE.STRING: 'TextField',
FIELD_TYPE.TIMESTAMP: 'DateTimeField',
FIELD_TYPE.TINY: 'IntegerField',
FIELD_TYPE.TINY_BLOB: 'TextField',
FIELD_TYPE.MEDIUM_BLOB: 'TextField',
FIELD_TYPE.LONG_BLOB: 'TextField',
FIELD_TYPE.VAR_STRING: 'CharField',
}
|
uwdata/termite-treetm | refs/heads/master | web2py/gluon/contrib/ordereddict.py | 1047 | # Copyright (c) 2009 Raymond Hettinger
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from UserDict import DictMixin
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.__map = {} # key --> [key, prev, next]
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next = self.__map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
if last:
key = reversed(self).next()
else:
key = iter(self).next()
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
iterkeys = DictMixin.iterkeys
itervalues = DictMixin.itervalues
iteritems = DictMixin.iteritems
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
if len(self) != len(other):
return False
for p, q in zip(self.items(), other.items()):
if p != q:
return False
return True
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
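if __name__ == '__main__':
    # Quick illustrative self-check (not part of the original recipe): keys
    # come back in insertion order and popitem() pops LIFO by default.
    d = OrderedDict([('b', 1), ('a', 2)])
    assert d.keys() == ['b', 'a']
    assert d.popitem() == ('a', 2)
    assert d.popitem(last=False) == ('b', 1)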
|
chenzeyu/IbPy | refs/heads/master | ib/ext/OrderComboLeg.py | 9 | #!/usr/bin/env python
""" generated source for module OrderComboLeg """
#
# Original file copyright original author(s).
# This file copyright Troy Melhase, [email protected].
#
# WARNING: all changes to this file will be lost.
from ib.lib import Double
from ib.lib.overloading import overloaded
#
# * OrderComboLeg.java
# *
#
# package: com.ib.client
class OrderComboLeg(object):
""" generated source for class OrderComboLeg """
m_price = float()
# price per leg
@overloaded
def __init__(self):
""" generated source for method __init__ """
self.m_price = Double.MAX_VALUE
@__init__.register(object, float)
def __init___0(self, p_price):
""" generated source for method __init___0 """
self.m_price = p_price
def __eq__(self, p_other):
""" generated source for method equals """
if self is p_other:
return True
elif p_other is None:
return False
l_theOther = p_other
if self.m_price != l_theOther.m_price:
return False
return True
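# Usage sketch (illustrative): the @overloaded pair above yields two
# constructors, dispatched on the argument types.
#
#     leg = OrderComboLeg()          # m_price defaults to Double.MAX_VALUE
#     priced = OrderComboLeg(1.25)   # dispatches to __init___0(float)
#     leg == priced                  # False -- __eq__ compares m_price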
|
repodono/repodono.jobs | refs/heads/master | setup.py | 1 | from setuptools import setup, find_packages
version = '0.0'
classifiers = """
Development Status :: 4 - Beta
Environment :: Console
Intended Audience :: Developers
License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
Operating System :: OS Independent
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
""".strip().splitlines()
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.rst').read()
+ '\n')
package_json = {
"devDependencies": {
"eslint": "~3.15.0",
}
}
setup(
name='repodono.jobs',
version=version,
description="Simple job server",
long_description=long_description,
classifiers=classifiers,
keywords='',
author='Tommy Yu',
author_email='[email protected]',
url='https://github.com/repodono/repodono.jobs',
license='gpl',
packages=find_packages('src'),
package_dir={'': 'src'},
package_json=package_json,
extras_require={
'calmjs': [
'calmjs>=2.1.0',
],
'requirejs': [
'calmjs.rjs',
],
'sanic': [
'sanic>=0.4',
],
'dev': [
'aiohttp',
'calmjs.dev>=1.0.1,<2',
],
},
namespace_packages=['repodono'],
include_package_data=True,
python_requires='>=3.4',
zip_safe=False,
install_requires=[
'setuptools',
# -*- Extra requirements: -*-
],
entry_points={
'calmjs.module': [
'repodono.jobs = repodono.jobs',
],
'calmjs.module.tests': [
'repodono.jobs.tests = repodono.jobs.tests',
],
},
calmjs_module_registry=['calmjs.module'],
test_suite="repodono.jobs.tests.make_suite",
)
|
jokajak/itweb | refs/heads/master | data/env/lib/python2.6/site-packages/SQLAlchemy-0.6.7-py2.6.egg/sqlalchemy/dialects/access/__init__.py | 12133432 | |
vfine/webplatform | refs/heads/master | pmModules/__init__.py | 12133432 | |
godiard/sugar | refs/heads/master | extensions/cpsection/webaccount/view.py | 7 | # Copyright (C) 2013, Walter Bender - Raul Gutierrez Segales
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gettext import gettext as _
from gi.repository import GLib
from gi.repository import Gtk
from gi.repository import Gdk
from jarabe.webservice.accountsmanager import get_webaccount_services
from jarabe.controlpanel.sectionview import SectionView
from sugar3.graphics.icon import CanvasIcon, Icon
from sugar3.graphics import style
def get_service_name(service):
if hasattr(service, '_account'):
if hasattr(service._account, 'get_description'):
return service._account.get_description()
return ''
class WebServicesConfig(SectionView):
def __init__(self, model, alerts):
SectionView.__init__(self)
self._model = model
self.restart_alerts = alerts
services = get_webaccount_services()
grid = Gtk.Grid()
if len(services) == 0:
grid.set_row_spacing(style.DEFAULT_SPACING)
icon = Icon(pixel_size=style.LARGE_ICON_SIZE,
icon_name='module-webaccount',
stroke_color=style.COLOR_BUTTON_GREY.get_svg(),
fill_color=style.COLOR_TRANSPARENT.get_svg())
grid.attach(icon, 0, 0, 1, 1)
icon.show()
label = Gtk.Label()
label.set_justify(Gtk.Justification.CENTER)
label.set_markup(
'<span foreground="%s" size="large">%s</span>'
% (style.COLOR_BUTTON_GREY.get_html(),
GLib.markup_escape_text(
                       _('No web services are installed.\n'
                         'Please visit %s for more details.') %
                       'http://wiki.sugarlabs.org/go/WebServices')))
label.show()
grid.attach(label, 0, 1, 1, 1)
alignment = Gtk.Alignment.new(0.5, 0.5, 0.1, 0.1)
alignment.add(grid)
grid.show()
self.add(alignment)
alignment.show()
return
grid.set_row_spacing(style.DEFAULT_SPACING * 4)
grid.set_column_spacing(style.DEFAULT_SPACING * 4)
grid.set_border_width(style.DEFAULT_SPACING * 2)
grid.set_column_homogeneous(True)
width = Gdk.Screen.width() - 2 * style.GRID_CELL_SIZE
nx = int(width / (style.GRID_CELL_SIZE + style.DEFAULT_SPACING * 4))
self._service_config_box = Gtk.VBox()
x = 0
y = 0
for service in services:
service_grid = Gtk.Grid()
icon = CanvasIcon(icon_name=service.get_icon_name())
icon.show()
service_grid.attach(icon, x, y, 1, 1)
icon.connect('activate', service.config_service_cb, None,
self._service_config_box)
label = Gtk.Label()
label.set_justify(Gtk.Justification.CENTER)
name = get_service_name(service)
label.set_markup(name)
service_grid.attach(label, x, y + 1, 1, 1)
label.show()
grid.attach(service_grid, x, y, 1, 1)
service_grid.show()
x += 1
if x == nx:
x = 0
y += 1
alignment = Gtk.Alignment.new(0.5, 0, 0, 0)
alignment.add(grid)
grid.show()
vbox = Gtk.VBox()
vbox.pack_start(alignment, False, False, 0)
alignment.show()
scrolled = Gtk.ScrolledWindow()
vbox.pack_start(scrolled, True, True, 0)
self.add(vbox)
scrolled.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
scrolled.show()
workspace = Gtk.VBox()
scrolled.add_with_viewport(workspace)
workspace.show()
workspace.add(self._service_config_box)
workspace.show_all()
vbox.show()
def undo(self):
pass
|
tchernomax/ansible | refs/heads/devel | lib/ansible/modules/cloud/google/gcp_compute_backend_service_facts.py | 12 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ["preview"],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_backend_service_facts
description:
- Gather facts for GCP BackendService
short_description: Gather facts for GCP BackendService
version_added: 2.7
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
filters:
description:
        A list of filter value pairs. Available filters are listed here
        U(https://cloud.google.com/sdk/gcloud/reference/topic/filters).
        Each additional filter in the list will be added as an AND condition
        (filter1 and filter2).
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: a backend service facts
gcp_compute_backend_service_facts:
filters:
- name = test_object
project: test_project
auth_kind: service_account
service_account_file: "/tmp/auth.pem"
'''
RETURN = '''
items:
description: List of items
returned: always
type: complex
contains:
affinity_cookie_ttl_sec:
description:
- Lifetime of cookies in seconds if session_affinity is GENERATED_COOKIE. If set to
0, the cookie is non-persistent and lasts only until the end of the browser session
(or equivalent). The maximum allowed value for TTL is one day.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: int
backends:
description:
- The list of backends that serve this BackendService.
returned: success
type: complex
contains:
balancing_mode:
description:
- Specifies the balancing mode for this backend.
- For global HTTP(S) or TCP/SSL load balancing, the default is UTILIZATION. Valid
values are UTILIZATION, RATE (for HTTP(S)) and CONNECTION (for TCP/SSL).
- This cannot be used for internal load balancing.
returned: success
type: str
capacity_scaler:
description:
- A multiplier applied to the group's maximum servicing capacity (based on UTILIZATION,
RATE or CONNECTION).
- Default value is 1, which means the group will serve up to 100% of its configured
capacity (depending on balancingMode). A setting of 0 means the group is completely
drained, offering 0% of its available Capacity. Valid range is [0.0,1.0].
- This cannot be used for internal load balancing.
returned: success
type: str
description:
description:
- An optional description of this resource.
- Provide this property when you create the resource.
returned: success
type: str
group:
description:
- This instance group defines the list of instances that serve traffic. Member virtual
machine instances from each instance group must live in the same zone as the instance
group itself.
- No two backends in a backend service are allowed to use same Instance Group resource.
- When the BackendService has load balancing scheme INTERNAL, the instance group must
be in a zone within the same region as the BackendService.
returned: success
type: dict
max_connections:
description:
- The max number of simultaneous connections for the group. Can be used with either
CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance must be
set.
- This cannot be used for internal load balancing.
returned: success
type: int
max_connections_per_instance:
description:
- The max number of simultaneous connections that a single backend instance can handle.
This is used to calculate the capacity of the group. Can be used in either CONNECTION
or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance must be
set.
- This cannot be used for internal load balancing.
returned: success
type: int
max_rate:
description:
- The max requests per second (RPS) of the group.
- Can be used with either RATE or UTILIZATION balancing modes, but required if RATE
mode. For RATE mode, either maxRate or maxRatePerInstance must be set.
- This cannot be used for internal load balancing.
returned: success
type: int
max_rate_per_instance:
description:
- The max requests per second (RPS) that a single backend instance can handle. This
is used to calculate the capacity of the group. Can be used in either balancing
mode. For RATE mode, either maxRate or maxRatePerInstance must be set.
- This cannot be used for internal load balancing.
returned: success
type: str
max_utilization:
description:
- Used when balancingMode is UTILIZATION. This ratio defines the CPU utilization target
for the group. The default is 0.8. Valid range is [0.0, 1.0].
- This cannot be used for internal load balancing.
returned: success
type: str
cdn_policy:
description:
- Cloud CDN configuration for this BackendService.
returned: success
type: complex
contains:
cache_key_policy:
description:
- The CacheKeyPolicy for this CdnPolicy.
returned: success
type: complex
contains:
include_host:
description:
- If true requests to different hosts will be cached separately.
returned: success
type: bool
include_protocol:
description:
- If true, http and https requests will be cached separately.
returned: success
type: bool
include_query_string:
description:
- If true, include query string parameters in the cache key according to query_string_whitelist
and query_string_blacklist. If neither is set, the entire query string will be included.
- If false, the query string will be excluded from the cache key entirely.
returned: success
type: bool
query_string_blacklist:
description:
- Names of query string parameters to exclude in cache keys.
- All other parameters will be included. Either specify query_string_whitelist or
query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
returned: success
type: list
query_string_whitelist:
description:
- Names of query string parameters to include in cache keys.
- All other parameters will be excluded. Either specify query_string_whitelist or
query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
returned: success
type: list
connection_draining:
description:
- Settings for connection draining.
returned: success
type: complex
contains:
draining_timeout_sec:
description:
- Time for which instance will be drained (not accept new connections, but still work
to finish started).
returned: success
type: int
creation_timestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource.
returned: success
type: str
enable_cdn:
description:
- If true, enable Cloud CDN for this BackendService.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: bool
health_checks:
description:
- The list of URLs to the HttpHealthCheck or HttpsHealthCheck resource for health
checking this BackendService. Currently at most one health check can be specified,
and a health check is required.
- For internal load balancing, a URL to a HealthCheck resource must be specified instead.
returned: success
type: list
id:
description:
- The unique identifier for the resource.
returned: success
type: int
name:
description:
- Name of the resource. Provided by the client when the resource is created. The name
must be 1-63 characters long, and comply with RFC1035. Specifically, the name must
be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following characters
must be a dash, lowercase letter, or digit, except the last character, which cannot
be a dash.
returned: success
type: str
port_name:
description:
- Name of backend port. The same name should appear in the instance groups referenced
by this service. Required when the load balancing scheme is EXTERNAL.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: str
protocol:
description:
- The protocol this BackendService uses to communicate with backends.
- Possible values are HTTP, HTTPS, TCP, and SSL. The default is HTTP.
- For internal load balancing, the possible values are TCP and UDP, and the default
is TCP.
returned: success
type: str
region:
description:
- The region where the regional backend service resides.
- This field is not applicable to global backend services.
returned: success
type: str
session_affinity:
description:
- Type of session affinity to use. The default is NONE.
- When the load balancing scheme is EXTERNAL, can be NONE, CLIENT_IP, or GENERATED_COOKIE.
- When the load balancing scheme is INTERNAL, can be NONE, CLIENT_IP, CLIENT_IP_PROTO,
or CLIENT_IP_PORT_PROTO.
- When the protocol is UDP, this field is not used.
returned: success
type: str
timeout_sec:
description:
- How many seconds to wait for the backend before considering it a failed request.
Default is 30 seconds. Valid range is [1, 86400].
returned: success
type: int
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
import json
################################################################################
# Main
################################################################################
def main():
module = GcpModule(
argument_spec=dict(
filters=dict(type='list', elements='str'),
)
)
if 'scopes' not in module.params:
module.params['scopes'] = ['https://www.googleapis.com/auth/compute']
items = fetch_list(module, collection(module), query_options(module.params['filters']))
if items.get('items'):
items = items.get('items')
else:
items = []
return_value = {
'items': items
}
module.exit_json(**return_value)
def collection(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices".format(**module.params)
def fetch_list(module, link, query):
auth = GcpSession(module, 'compute')
response = auth.get(link, params={'filter': query})
return return_if_object(module, response)
def query_options(filters):
if not filters:
return ''
if len(filters) == 1:
return filters[0]
else:
queries = []
for f in filters:
# For multiple queries, all queries should have ()
if f[0] != '(' and f[-1] != ')':
queries.append("(%s)" % ''.join(f))
else:
queries.append(f)
return ' '.join(queries)
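# Illustrative behaviour of query_options (filter strings assumed):
#     query_options(['name = test'])
#         -> 'name = test'
#     query_options(['name = test', 'zone = us-east1-b'])
#         -> '(name = test) (zone = us-east1-b)'  # space-joined AND conditions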
def return_if_object(module, response):
# If not found, return nothing.
if response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
module.fail_json(msg="Invalid JSON response with error: %s" % inst)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
return result
if __name__ == "__main__":
main()
|
crosswalk-project/blink-crosswalk | refs/heads/master | Source/bindings/scripts/generate_event_interfaces.py | 22 | #!/usr/bin/python
#
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Generate event interfaces .in file (EventInterfaces.in).
The event interfaces .in file contains a list of all Event interfaces, i.e.,
all interfaces that inherit from Event, including Event itself,
together with certain extended attributes.
Paths are in POSIX format, and relative to Source/.
This list is used in core/ to generate EventFactory and EventNames.
The .in format is documented in build/scripts/in_file.py.
"""
from optparse import OptionParser
import os
import posixpath
import sys
from utilities import get_file_contents, read_file_to_list, write_file, get_interface_extended_attributes_from_idl
EXPORTED_EXTENDED_ATTRIBUTES = (
'Conditional',
'ImplementedAs',
'RuntimeEnabled',
)
module_path = os.path.dirname(os.path.realpath(__file__))
source_dir = os.path.normpath(os.path.join(module_path, os.pardir, os.pardir))
def parse_options():
parser = OptionParser()
parser.add_option('--event-idl-files-list', help='file listing event IDL files')
parser.add_option('--event-interfaces-file', help='output file')
parser.add_option('--write-file-only-if-changed', type='int', help='if true, do not write an output file if it would be identical to the existing one, which avoids unnecessary rebuilds in ninja')
parser.add_option('--suffix', help='specify a suffix to the namespace, i.e., "Modules". Default is None.')
options, args = parser.parse_args()
if options.event_idl_files_list is None:
parser.error('Must specify a file listing event IDL files using --event-idl-files-list.')
if options.event_interfaces_file is None:
parser.error('Must specify an output file using --event-interfaces-file.')
if options.write_file_only_if_changed is None:
parser.error('Must specify whether file is only written if changed using --write-file-only-if-changed.')
options.write_file_only_if_changed = bool(options.write_file_only_if_changed)
if args:
parser.error('No arguments allowed, but %d given.' % len(args))
return options
def write_event_interfaces_file(event_idl_files, destination_filename, only_if_changed, suffix):
def extended_attribute_string(name, value):
if name == 'RuntimeEnabled':
value += 'Enabled'
return name + '=' + value
def interface_line(full_path):
relative_path_local, _ = os.path.splitext(os.path.relpath(full_path, source_dir))
relative_path_posix = relative_path_local.replace(os.sep, posixpath.sep)
idl_file_contents = get_file_contents(full_path)
extended_attributes = get_interface_extended_attributes_from_idl(idl_file_contents)
extended_attributes_list = [
extended_attribute_string(name, extended_attributes[name])
for name in EXPORTED_EXTENDED_ATTRIBUTES
if name in extended_attributes]
return '%s %s\n' % (relative_path_posix,
', '.join(extended_attributes_list))
lines = ['namespace="Event"\n']
if suffix:
lines.append('suffix="' + suffix + '"\n')
lines.append('export=%s_EXPORT\n' % suffix.upper())
else:
lines.append('export=CORE_EXPORT\n')
lines.append('\n')
interface_lines = [interface_line(event_idl_file)
for event_idl_file in event_idl_files]
interface_lines.sort()
lines.extend(interface_lines)
write_file(''.join(lines), destination_filename, only_if_changed)
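# Illustrative output line for an event IDL with extended attributes (the
# path and attribute values below are assumed, not taken from a real run):
#     modules/webaudio/OfflineAudioCompletionEvent Conditional=WEB_AUDIO, RuntimeEnabled=WebAudioEnabled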
################################################################################
def main():
options = parse_options()
event_idl_files = read_file_to_list(options.event_idl_files_list)
write_event_interfaces_file(event_idl_files,
options.event_interfaces_file,
options.write_file_only_if_changed,
options.suffix)
if __name__ == '__main__':
sys.exit(main())
|
uaarg/missioncommander | refs/heads/suas2017 | ui/__init__.py | 1 | from .ui import UI
|
sodafree/backend | refs/heads/master | build/lib.linux-i686-2.7/django/contrib/localflavor/uy/forms.py | 87 | # -*- coding: utf-8 -*-
"""
UY-specific form helpers.
"""
from __future__ import absolute_import
from django.core.validators import EMPTY_VALUES
from django.forms.fields import Select, RegexField
from django.forms import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.contrib.localflavor.uy.util import get_validation_digit
class UYDepartamentSelect(Select):
"""
    A Select widget that uses a list of Uruguayan departments as its choices.
"""
def __init__(self, attrs=None):
from django.contrib.localflavor.uy.uy_departaments import DEPARTAMENT_CHOICES
super(UYDepartamentSelect, self).__init__(attrs, choices=DEPARTAMENT_CHOICES)
class UYCIField(RegexField):
"""
A field that validates Uruguayan 'Cedula de identidad' (CI) numbers.
"""
default_error_messages = {
'invalid': _("Enter a valid CI number in X.XXX.XXX-X,"
"XXXXXXX-X or XXXXXXXX format."),
'invalid_validation_digit': _("Enter a valid CI number."),
}
def __init__(self, *args, **kwargs):
super(UYCIField, self).__init__(r'(?P<num>(\d{6,7}|(\d\.)?\d{3}\.\d{3}))-?(?P<val>\d)',
*args, **kwargs)
def clean(self, value):
"""
Validates format and validation digit.
        The official format is [X.]XXX.XXX-X, but usually the dots and/or the
        dash are omitted, so those characters are ignored during validation if
        found in the correct place. The three typically used formats are
        supported: [X]XXXXXXX, [X]XXXXXX-X and [X.]XXX.XXX-X.
"""
value = super(UYCIField, self).clean(value)
if value in EMPTY_VALUES:
return u''
match = self.regex.match(value)
if not match:
raise ValidationError(self.error_messages['invalid'])
number = int(match.group('num').replace('.', ''))
validation_digit = int(match.group('val'))
if not validation_digit == get_validation_digit(number):
raise ValidationError(self.error_messages['invalid_validation_digit'])
return value
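# Usage sketch (illustrative; X stands for the check digit computed by
# get_validation_digit -- a wrong final digit raises 'invalid_validation_digit'):
#
#     field = UYCIField()
#     field.clean('1.234.567-X')   # official dotted format
#     field.clean('1234567X')      # same number with dots and dash omitted
#     field.clean('123-456')       # raises ValidationError (bad format)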
|
Dev-Cloud-Platform/Dev-Cloud | refs/heads/master | dev_cloud/cc1/src/clm/views/admin_cm/template.py | 2 | # -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2010-2014] Institute of Nuclear Physics PAN, Krakow, Poland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
"""@package src.clm.views.admin_cm.template
"""
from clm.utils.decorators import admin_cm_log, cm_request
@admin_cm_log(log=True)
@cm_request
def add(cm_response, **data):
"""
    Creates a VM template. A template has a name and a description.
    It defines the VM's hardware parameters: CPU and memory. It also defines
    the number of points consumed by VMs created from it (per hour and overall).
@clmview_admin_cm
@cm_request_transparent{user.add()}
"""
return cm_response
@admin_cm_log(log=True, pack=False)
@cm_request
def delete(cm_response, **data):
"""
Deletes template from available templates.
@clmview_admin_cm
@cm_request_transparent{user.delete()}
"""
return cm_response
@admin_cm_log(log=True)
@cm_request
def edit(cm_response, **data):
"""
Edits Template's components.
@clmview_admin_cm
@cm_request_transparent{user.edit()}
"""
return cm_response
@admin_cm_log(log=True, pack=False)
@cm_request
def get_list(cm_response, **data):
"""
Returns list of available Templates.
@clmview_admin_cm
@cm_request_transparent{user.get_list()}
"""
return cm_response
@admin_cm_log(log=False, pack=False)
@cm_request
def get_by_id(cm_response, **data):
"""
Returns requested Template.
@clmview_admin_cm
@cm_request_transparent{user.get_by_id()}
"""
return cm_response
|
stwunsch/gnuradio | refs/heads/master | gr-filter/python/filter/gui/pyqt_filter_stacked.py | 58 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pyqt_filter_stacked.ui'
#
# Created: Wed Aug 8 11:42:47 2012
# by: PyQt4 UI code generator 4.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1128, 649)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.stackedWindows = QtGui.QStackedWidget(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.stackedWindows.sizePolicy().hasHeightForWidth())
self.stackedWindows.setSizePolicy(sizePolicy)
self.stackedWindows.setObjectName(_fromUtf8("stackedWindows"))
self.classic = QtGui.QWidget()
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.classic.sizePolicy().hasHeightForWidth())
self.classic.setSizePolicy(sizePolicy)
self.classic.setObjectName(_fromUtf8("classic"))
self.horizontalLayout = QtGui.QHBoxLayout(self.classic)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.splitter = QtGui.QSplitter(self.classic)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.splitter.sizePolicy().hasHeightForWidth())
self.splitter.setSizePolicy(sizePolicy)
self.splitter.setMinimumSize(QtCore.QSize(600, 0))
self.splitter.setOrientation(QtCore.Qt.Vertical)
self.splitter.setObjectName(_fromUtf8("splitter"))
self.tabGroup = QtGui.QTabWidget(self.splitter)
self.tabGroup.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.tabGroup.sizePolicy().hasHeightForWidth())
self.tabGroup.setSizePolicy(sizePolicy)
self.tabGroup.setTabsClosable(False)
self.tabGroup.setMovable(False)
self.tabGroup.setObjectName(_fromUtf8("tabGroup"))
self.freqTab = QtGui.QWidget()
self.freqTab.setObjectName(_fromUtf8("freqTab"))
self.horizontalLayout_9 = QtGui.QHBoxLayout(self.freqTab)
self.horizontalLayout_9.setObjectName(_fromUtf8("horizontalLayout_9"))
self.freqPlot = Qwt5.QwtPlot(self.freqTab)
self.freqPlot.setObjectName(_fromUtf8("freqPlot"))
self.horizontalLayout_9.addWidget(self.freqPlot)
self.tabGroup.addTab(self.freqTab, _fromUtf8(""))
self.timeTab = QtGui.QWidget()
self.timeTab.setObjectName(_fromUtf8("timeTab"))
self.horizontalLayout_10 = QtGui.QHBoxLayout(self.timeTab)
self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10"))
self.timePlot = Qwt5.QwtPlot(self.timeTab)
self.timePlot.setObjectName(_fromUtf8("timePlot"))
self.horizontalLayout_10.addWidget(self.timePlot)
self.tabGroup.addTab(self.timeTab, _fromUtf8(""))
self.phaseTab = QtGui.QWidget()
self.phaseTab.setObjectName(_fromUtf8("phaseTab"))
self.horizontalLayout_11 = QtGui.QHBoxLayout(self.phaseTab)
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
self.phasePlot = Qwt5.QwtPlot(self.phaseTab)
self.phasePlot.setObjectName(_fromUtf8("phasePlot"))
self.horizontalLayout_11.addWidget(self.phasePlot)
self.tabGroup.addTab(self.phaseTab, _fromUtf8(""))
self.groupTab = QtGui.QWidget()
self.groupTab.setObjectName(_fromUtf8("groupTab"))
self.horizontalLayout_12 = QtGui.QHBoxLayout(self.groupTab)
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12"))
self.groupPlot = Qwt5.QwtPlot(self.groupTab)
self.groupPlot.setObjectName(_fromUtf8("groupPlot"))
self.horizontalLayout_12.addWidget(self.groupPlot)
self.tabGroup.addTab(self.groupTab, _fromUtf8(""))
self.fcTab = QtGui.QWidget()
self.fcTab.setObjectName(_fromUtf8("fcTab"))
self.horizontalLayout_7 = QtGui.QHBoxLayout(self.fcTab)
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.filterCoeff = QtGui.QTextBrowser(self.fcTab)
self.filterCoeff.setObjectName(_fromUtf8("filterCoeff"))
self.horizontalLayout_7.addWidget(self.filterCoeff)
self.tabGroup.addTab(self.fcTab, _fromUtf8(""))
self.impresTab = QtGui.QWidget()
self.impresTab.setObjectName(_fromUtf8("impresTab"))
self.horizontalLayout_6 = QtGui.QHBoxLayout(self.impresTab)
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
self.impresPlot = Qwt5.QwtPlot(self.impresTab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.impresPlot.sizePolicy().hasHeightForWidth())
self.impresPlot.setSizePolicy(sizePolicy)
self.impresPlot.setObjectName(_fromUtf8("impresPlot"))
self.horizontalLayout_6.addWidget(self.impresPlot)
self.tabGroup.addTab(self.impresTab, _fromUtf8(""))
self.stepresTab = QtGui.QWidget()
self.stepresTab.setObjectName(_fromUtf8("stepresTab"))
self.horizontalLayout_8 = QtGui.QHBoxLayout(self.stepresTab)
self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8"))
self.stepresPlot = Qwt5.QwtPlot(self.stepresTab)
self.stepresPlot.setObjectName(_fromUtf8("stepresPlot"))
self.horizontalLayout_8.addWidget(self.stepresPlot)
self.tabGroup.addTab(self.stepresTab, _fromUtf8(""))
self.pdelayTab = QtGui.QWidget()
self.pdelayTab.setObjectName(_fromUtf8("pdelayTab"))
self.horizontalLayout_17 = QtGui.QHBoxLayout(self.pdelayTab)
self.horizontalLayout_17.setObjectName(_fromUtf8("horizontalLayout_17"))
self.pdelayPlot = Qwt5.QwtPlot(self.pdelayTab)
self.pdelayPlot.setObjectName(_fromUtf8("pdelayPlot"))
self.horizontalLayout_17.addWidget(self.pdelayPlot)
self.tabGroup.addTab(self.pdelayTab, _fromUtf8(""))
self.filterspecView = QtGui.QTabWidget(self.splitter)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.filterspecView.sizePolicy().hasHeightForWidth())
self.filterspecView.setSizePolicy(sizePolicy)
self.filterspecView.setMinimumSize(QtCore.QSize(0, 100))
self.filterspecView.setBaseSize(QtCore.QSize(0, 100))
self.filterspecView.setDocumentMode(False)
self.filterspecView.setTabsClosable(False)
self.filterspecView.setObjectName(_fromUtf8("filterspecView"))
self.bandDiagram = QtGui.QWidget()
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.bandDiagram.sizePolicy().hasHeightForWidth())
self.bandDiagram.setSizePolicy(sizePolicy)
self.bandDiagram.setObjectName(_fromUtf8("bandDiagram"))
self.horizontalLayout_13 = QtGui.QHBoxLayout(self.bandDiagram)
self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
self.bandView = BandGraphicsView(self.bandDiagram)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.bandView.sizePolicy().hasHeightForWidth())
self.bandView.setSizePolicy(sizePolicy)
self.bandView.setMinimumSize(QtCore.QSize(525, 249))
self.bandView.setObjectName(_fromUtf8("bandView"))
self.horizontalLayout_13.addWidget(self.bandView)
self.filterspecView.addTab(self.bandDiagram, _fromUtf8(""))
self.poleZero = QtGui.QWidget()
self.poleZero.setAutoFillBackground(False)
self.poleZero.setObjectName(_fromUtf8("poleZero"))
self.gridLayout_2 = QtGui.QGridLayout(self.poleZero)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.pzPlot = PzPlot(self.poleZero)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pzPlot.sizePolicy().hasHeightForWidth())
self.pzPlot.setSizePolicy(sizePolicy)
self.pzPlot.setObjectName(_fromUtf8("pzPlot"))
self.gridLayout_2.addWidget(self.pzPlot, 0, 0, 1, 1)
self.pzgroupBox = QtGui.QGroupBox(self.poleZero)
self.pzgroupBox.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.pzgroupBox.setTitle(_fromUtf8(""))
self.pzgroupBox.setFlat(False)
self.pzgroupBox.setCheckable(False)
self.pzgroupBox.setObjectName(_fromUtf8("pzgroupBox"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.pzgroupBox)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.addzeroPush = QtGui.QToolButton(self.pzgroupBox)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/add_zero.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.addzeroPush.setIcon(icon)
self.addzeroPush.setIconSize(QtCore.QSize(16, 16))
self.addzeroPush.setCheckable(True)
self.addzeroPush.setObjectName(_fromUtf8("addzeroPush"))
self.verticalLayout_3.addWidget(self.addzeroPush)
self.addpolePush = QtGui.QToolButton(self.pzgroupBox)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/add_pole.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.addpolePush.setIcon(icon1)
self.addpolePush.setCheckable(True)
self.addpolePush.setObjectName(_fromUtf8("addpolePush"))
self.verticalLayout_3.addWidget(self.addpolePush)
self.delPush = QtGui.QToolButton(self.pzgroupBox)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/remove_red.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.delPush.setIcon(icon2)
self.delPush.setCheckable(True)
self.delPush.setObjectName(_fromUtf8("delPush"))
self.verticalLayout_3.addWidget(self.delPush)
self.conjPush = QtGui.QToolButton(self.pzgroupBox)
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/conjugate.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.conjPush.setIcon(icon3)
self.conjPush.setIconSize(QtCore.QSize(16, 16))
self.conjPush.setCheckable(True)
self.conjPush.setObjectName(_fromUtf8("conjPush"))
self.verticalLayout_3.addWidget(self.conjPush)
self.gridLayout_2.addWidget(self.pzgroupBox, 0, 1, 1, 1)
self.pzstatusBar = QtGui.QStatusBar(self.poleZero)
self.pzstatusBar.setObjectName(_fromUtf8("pzstatusBar"))
self.gridLayout_2.addWidget(self.pzstatusBar, 1, 0, 1, 2)
self.filterspecView.addTab(self.poleZero, _fromUtf8(""))
self.horizontalLayout.addWidget(self.splitter)
self.quickFrame = QtGui.QFrame(self.classic)
self.quickFrame.setMinimumSize(QtCore.QSize(180, 200))
self.quickFrame.setFrameShape(QtGui.QFrame.StyledPanel)
self.quickFrame.setFrameShadow(QtGui.QFrame.Raised)
self.quickFrame.setObjectName(_fromUtf8("quickFrame"))
self.responseBox = QtGui.QGroupBox(self.quickFrame)
self.responseBox.setGeometry(QtCore.QRect(10, 10, 161, 251))
self.responseBox.setObjectName(_fromUtf8("responseBox"))
self.checkMagres = QtGui.QCheckBox(self.responseBox)
self.checkMagres.setGeometry(QtCore.QRect(10, 40, 151, 19))
self.checkMagres.setChecked(True)
self.checkMagres.setObjectName(_fromUtf8("checkMagres"))
self.checkPhase = QtGui.QCheckBox(self.responseBox)
self.checkPhase.setGeometry(QtCore.QRect(10, 60, 151, 19))
self.checkPhase.setChecked(True)
self.checkPhase.setObjectName(_fromUtf8("checkPhase"))
self.checkGdelay = QtGui.QCheckBox(self.responseBox)
self.checkGdelay.setGeometry(QtCore.QRect(10, 80, 111, 19))
self.checkGdelay.setChecked(True)
self.checkGdelay.setObjectName(_fromUtf8("checkGdelay"))
self.checkPdelay = QtGui.QCheckBox(self.responseBox)
self.checkPdelay.setGeometry(QtCore.QRect(10, 100, 111, 19))
self.checkPdelay.setChecked(True)
self.checkPdelay.setObjectName(_fromUtf8("checkPdelay"))
self.checkImpulse = QtGui.QCheckBox(self.responseBox)
self.checkImpulse.setGeometry(QtCore.QRect(10, 120, 141, 19))
self.checkImpulse.setChecked(True)
self.checkImpulse.setObjectName(_fromUtf8("checkImpulse"))
self.checkStep = QtGui.QCheckBox(self.responseBox)
self.checkStep.setGeometry(QtCore.QRect(10, 140, 131, 19))
self.checkStep.setChecked(True)
self.checkStep.setObjectName(_fromUtf8("checkStep"))
self.checkGrid = QtGui.QCheckBox(self.responseBox)
self.checkGrid.setGeometry(QtCore.QRect(10, 160, 85, 19))
self.checkGrid.setObjectName(_fromUtf8("checkGrid"))
self.checkFcoeff = QtGui.QCheckBox(self.responseBox)
self.checkFcoeff.setGeometry(QtCore.QRect(10, 180, 131, 19))
self.checkFcoeff.setChecked(True)
self.checkFcoeff.setObjectName(_fromUtf8("checkFcoeff"))
self.checkKeepcur = QtGui.QCheckBox(self.responseBox)
self.checkKeepcur.setGeometry(QtCore.QRect(10, 200, 141, 19))
self.checkKeepcur.setObjectName(_fromUtf8("checkKeepcur"))
self.groupSpecs = QtGui.QGroupBox(self.quickFrame)
self.groupSpecs.setGeometry(QtCore.QRect(10, 280, 161, 91))
self.groupSpecs.setObjectName(_fromUtf8("groupSpecs"))
self.checkBand = QtGui.QCheckBox(self.groupSpecs)
self.checkBand.setGeometry(QtCore.QRect(10, 20, 111, 19))
self.checkBand.setChecked(True)
self.checkBand.setObjectName(_fromUtf8("checkBand"))
self.checkPzplot = QtGui.QCheckBox(self.groupSpecs)
self.checkPzplot.setGeometry(QtCore.QRect(10, 40, 131, 19))
self.checkPzplot.setChecked(True)
self.checkPzplot.setObjectName(_fromUtf8("checkPzplot"))
self.sysParamsBox = QtGui.QGroupBox(self.quickFrame)
self.sysParamsBox.setGeometry(QtCore.QRect(10, 390, 161, 91))
self.sysParamsBox.setObjectName(_fromUtf8("sysParamsBox"))
self.formLayout_4 = QtGui.QFormLayout(self.sysParamsBox)
self.formLayout_4.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_4.setObjectName(_fromUtf8("formLayout_4"))
self.nfftLabel = QtGui.QLabel(self.sysParamsBox)
self.nfftLabel.setMinimumSize(QtCore.QSize(150, 0))
self.nfftLabel.setObjectName(_fromUtf8("nfftLabel"))
self.formLayout_4.setWidget(1, QtGui.QFormLayout.LabelRole, self.nfftLabel)
self.nfftEdit = QtGui.QLineEdit(self.sysParamsBox)
self.nfftEdit.setObjectName(_fromUtf8("nfftEdit"))
self.formLayout_4.setWidget(2, QtGui.QFormLayout.LabelRole, self.nfftEdit)
self.horizontalLayout.addWidget(self.quickFrame)
self.stackedWindows.addWidget(self.classic)
self.modern = QtGui.QWidget()
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.modern.sizePolicy().hasHeightForWidth())
self.modern.setSizePolicy(sizePolicy)
self.modern.setObjectName(_fromUtf8("modern"))
self.horizontalLayout_5 = QtGui.QHBoxLayout(self.modern)
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.splitter_3 = QtGui.QSplitter(self.modern)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.splitter_3.sizePolicy().hasHeightForWidth())
self.splitter_3.setSizePolicy(sizePolicy)
self.splitter_3.setOrientation(QtCore.Qt.Vertical)
self.splitter_3.setObjectName(_fromUtf8("splitter_3"))
self.splitter_2 = QtGui.QSplitter(self.splitter_3)
self.splitter_2.setOrientation(QtCore.Qt.Horizontal)
self.splitter_2.setObjectName(_fromUtf8("splitter_2"))
self.mfreqTabgroup = QtGui.QTabWidget(self.splitter_2)
self.mfreqTabgroup.setTabsClosable(False)
self.mfreqTabgroup.setObjectName(_fromUtf8("mfreqTabgroup"))
self.mfreqTab = QtGui.QWidget()
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mfreqTab.sizePolicy().hasHeightForWidth())
self.mfreqTab.setSizePolicy(sizePolicy)
self.mfreqTab.setObjectName(_fromUtf8("mfreqTab"))
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.mfreqTab)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.mfreqPlot = Qwt5.QwtPlot(self.mfreqTab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mfreqPlot.sizePolicy().hasHeightForWidth())
self.mfreqPlot.setSizePolicy(sizePolicy)
self.mfreqPlot.setObjectName(_fromUtf8("mfreqPlot"))
self.horizontalLayout_2.addWidget(self.mfreqPlot)
self.mfgroupBox = QtGui.QGroupBox(self.mfreqTab)
self.mfgroupBox.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.mfgroupBox.setTitle(_fromUtf8(""))
self.mfgroupBox.setFlat(False)
self.mfgroupBox.setCheckable(False)
self.mfgroupBox.setObjectName(_fromUtf8("mfgroupBox"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.mfgroupBox)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.mfmagPush = QtGui.QToolButton(self.mfgroupBox)
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/mag_response.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.mfmagPush.setIcon(icon4)
self.mfmagPush.setIconSize(QtCore.QSize(16, 16))
self.mfmagPush.setCheckable(False)
self.mfmagPush.setObjectName(_fromUtf8("mfmagPush"))
self.verticalLayout_2.addWidget(self.mfmagPush)
self.mfphasePush = QtGui.QToolButton(self.mfgroupBox)
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/phase_response.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.mfphasePush.setIcon(icon5)
self.mfphasePush.setIconSize(QtCore.QSize(16, 16))
self.mfphasePush.setCheckable(False)
self.mfphasePush.setObjectName(_fromUtf8("mfphasePush"))
self.verticalLayout_2.addWidget(self.mfphasePush)
self.mfgpdlyPush = QtGui.QToolButton(self.mfgroupBox)
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/group_delay.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.mfgpdlyPush.setIcon(icon6)
self.mfgpdlyPush.setIconSize(QtCore.QSize(16, 16))
self.mfgpdlyPush.setCheckable(False)
self.mfgpdlyPush.setObjectName(_fromUtf8("mfgpdlyPush"))
self.verticalLayout_2.addWidget(self.mfgpdlyPush)
self.mfphdlyPush = QtGui.QToolButton(self.mfgroupBox)
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/phase_delay.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.mfphdlyPush.setIcon(icon7)
self.mfphdlyPush.setIconSize(QtCore.QSize(16, 16))
self.mfphdlyPush.setCheckable(False)
self.mfphdlyPush.setObjectName(_fromUtf8("mfphdlyPush"))
self.verticalLayout_2.addWidget(self.mfphdlyPush)
self.mfoverlayPush = QtGui.QToolButton(self.mfgroupBox)
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/overlay.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.mfoverlayPush.setIcon(icon8)
self.mfoverlayPush.setIconSize(QtCore.QSize(16, 16))
self.mfoverlayPush.setCheckable(True)
self.mfoverlayPush.setObjectName(_fromUtf8("mfoverlayPush"))
self.verticalLayout_2.addWidget(self.mfoverlayPush)
self.horizontalLayout_2.addWidget(self.mfgroupBox)
self.mfreqTabgroup.addTab(self.mfreqTab, _fromUtf8(""))
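# Tab group for the time-domain plots: filter taps, step response and impulse response.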
self.mtimeTabgroup = QtGui.QTabWidget(self.splitter_2)
self.mtimeTabgroup.setObjectName(_fromUtf8("mtimeTabgroup"))
self.mtimeTab = QtGui.QWidget()
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mtimeTab.sizePolicy().hasHeightForWidth())
self.mtimeTab.setSizePolicy(sizePolicy)
self.mtimeTab.setObjectName(_fromUtf8("mtimeTab"))
self.horizontalLayout_3 = QtGui.QHBoxLayout(self.mtimeTab)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.mtimePlot = Qwt5.QwtPlot(self.mtimeTab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mtimePlot.sizePolicy().hasHeightForWidth())
self.mtimePlot.setSizePolicy(sizePolicy)
self.mtimePlot.setObjectName(_fromUtf8("mtimePlot"))
self.horizontalLayout_3.addWidget(self.mtimePlot)
self.mtgroupBox = QtGui.QGroupBox(self.mtimeTab)
self.mtgroupBox.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.mtgroupBox.setTitle(_fromUtf8(""))
self.mtgroupBox.setFlat(False)
self.mtgroupBox.setCheckable(False)
self.mtgroupBox.setObjectName(_fromUtf8("mtgroupBox"))
self.verticalLayout_5 = QtGui.QVBoxLayout(self.mtgroupBox)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.mttapsPush = QtGui.QToolButton(self.mtgroupBox)
icon9 = QtGui.QIcon()
icon9.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/filtr_taps.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.mttapsPush.setIcon(icon9)
self.mttapsPush.setIconSize(QtCore.QSize(16, 16))
self.mttapsPush.setCheckable(False)
self.mttapsPush.setObjectName(_fromUtf8("mttapsPush"))
self.verticalLayout_5.addWidget(self.mttapsPush)
self.mtstepPush = QtGui.QToolButton(self.mtgroupBox)
icon10 = QtGui.QIcon()
icon10.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/step_response.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.mtstepPush.setIcon(icon10)
self.mtstepPush.setIconSize(QtCore.QSize(16, 16))
self.mtstepPush.setCheckable(False)
self.mtstepPush.setObjectName(_fromUtf8("mtstepPush"))
self.verticalLayout_5.addWidget(self.mtstepPush)
self.mtimpPush = QtGui.QToolButton(self.mtgroupBox)
icon11 = QtGui.QIcon()
icon11.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/impulse.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.mtimpPush.setIcon(icon11)
self.mtimpPush.setIconSize(QtCore.QSize(16, 16))
self.mtimpPush.setCheckable(False)
self.mtimpPush.setObjectName(_fromUtf8("mtimpPush"))
self.verticalLayout_5.addWidget(self.mtimpPush)
self.horizontalLayout_3.addWidget(self.mtgroupBox)
self.mtimeTabgroup.addTab(self.mtimeTab, _fromUtf8(""))
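# Lower pane of the vertical splitter: filter specification views (ideal band diagram, pole-zero editor, filter coefficients).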
self.mfilterspecView = QtGui.QTabWidget(self.splitter_3)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mfilterspecView.sizePolicy().hasHeightForWidth())
self.mfilterspecView.setSizePolicy(sizePolicy)
self.mfilterspecView.setMinimumSize(QtCore.QSize(0, 100))
self.mfilterspecView.setBaseSize(QtCore.QSize(0, 100))
self.mfilterspecView.setDocumentMode(False)
self.mfilterspecView.setTabsClosable(False)
self.mfilterspecView.setObjectName(_fromUtf8("mfilterspecView"))
self.mbandDiagram = QtGui.QWidget()
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mbandDiagram.sizePolicy().hasHeightForWidth())
self.mbandDiagram.setSizePolicy(sizePolicy)
self.mbandDiagram.setObjectName(_fromUtf8("mbandDiagram"))
self.horizontalLayout_15 = QtGui.QHBoxLayout(self.mbandDiagram)
self.horizontalLayout_15.setObjectName(_fromUtf8("horizontalLayout_15"))
self.mbandView = BandGraphicsView(self.mbandDiagram)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.mbandView.sizePolicy().hasHeightForWidth())
self.mbandView.setSizePolicy(sizePolicy)
self.mbandView.setMinimumSize(QtCore.QSize(525, 249))
self.mbandView.setObjectName(_fromUtf8("mbandView"))
self.horizontalLayout_15.addWidget(self.mbandView)
self.mfilterspecView.addTab(self.mbandDiagram, _fromUtf8(""))
self.mpoleZero = QtGui.QWidget()
self.mpoleZero.setAutoFillBackground(False)
self.mpoleZero.setObjectName(_fromUtf8("mpoleZero"))
self.gridLayout_3 = QtGui.QGridLayout(self.mpoleZero)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.mpzPlot = PzPlot(self.mpoleZero)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.mpzPlot.sizePolicy().hasHeightForWidth())
self.mpzPlot.setSizePolicy(sizePolicy)
self.mpzPlot.setObjectName(_fromUtf8("mpzPlot"))
self.gridLayout_3.addWidget(self.mpzPlot, 0, 0, 1, 1)
self.mpzgroupBox = QtGui.QGroupBox(self.mpoleZero)
self.mpzgroupBox.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.mpzgroupBox.setTitle(_fromUtf8(""))
self.mpzgroupBox.setFlat(False)
self.mpzgroupBox.setCheckable(False)
self.mpzgroupBox.setObjectName(_fromUtf8("mpzgroupBox"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.mpzgroupBox)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
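# Checkable tool buttons for editing the pole-zero plot: add zero, add pole, delete and conjugate (reusing icons created earlier in setupUi).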
self.maddzeroPush = QtGui.QToolButton(self.mpzgroupBox)
self.maddzeroPush.setIcon(icon)
self.maddzeroPush.setCheckable(True)
self.maddzeroPush.setObjectName(_fromUtf8("maddzeroPush"))
self.verticalLayout_4.addWidget(self.maddzeroPush)
self.maddpolePush = QtGui.QToolButton(self.mpzgroupBox)
self.maddpolePush.setIcon(icon1)
self.maddpolePush.setCheckable(True)
self.maddpolePush.setObjectName(_fromUtf8("maddpolePush"))
self.verticalLayout_4.addWidget(self.maddpolePush)
self.mdelPush = QtGui.QToolButton(self.mpzgroupBox)
self.mdelPush.setIcon(icon2)
self.mdelPush.setCheckable(True)
self.mdelPush.setObjectName(_fromUtf8("mdelPush"))
self.verticalLayout_4.addWidget(self.mdelPush)
self.mconjPush = QtGui.QToolButton(self.mpzgroupBox)
self.mconjPush.setIcon(icon3)
self.mconjPush.setIconSize(QtCore.QSize(16, 16))
self.mconjPush.setCheckable(True)
self.mconjPush.setObjectName(_fromUtf8("mconjPush"))
self.verticalLayout_4.addWidget(self.mconjPush)
self.gridLayout_3.addWidget(self.mpzgroupBox, 0, 1, 1, 1)
self.mpzstatusBar = QtGui.QStatusBar(self.mpoleZero)
self.mpzstatusBar.setObjectName(_fromUtf8("mpzstatusBar"))
self.gridLayout_3.addWidget(self.mpzstatusBar, 1, 0, 1, 2)
self.mfilterspecView.addTab(self.mpoleZero, _fromUtf8(""))
self.mfcTab = QtGui.QWidget()
self.mfcTab.setObjectName(_fromUtf8("mfcTab"))
self.horizontalLayout_4 = QtGui.QHBoxLayout(self.mfcTab)
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.mfilterCoeff = QtGui.QTextBrowser(self.mfcTab)
self.mfilterCoeff.setObjectName(_fromUtf8("mfilterCoeff"))
self.horizontalLayout_4.addWidget(self.mfilterCoeff)
self.mfilterspecView.addTab(self.mfcTab, _fromUtf8(""))
self.horizontalLayout_5.addWidget(self.splitter_3)
self.stackedWindows.addWidget(self.modern)
self.gridLayout.addWidget(self.stackedWindows, 0, 1, 1, 1)
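# Fixed-width (300 px) design panel on the left: filter selection combo boxes, global parameters, per-type parameter pages and the Design button.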
self.filterFrame = QtGui.QFrame(self.centralwidget)
self.filterFrame.setMinimumSize(QtCore.QSize(300, 0))
self.filterFrame.setMaximumSize(QtCore.QSize(300, 16777215))
self.filterFrame.setFrameShape(QtGui.QFrame.StyledPanel)
self.filterFrame.setFrameShadow(QtGui.QFrame.Raised)
self.filterFrame.setObjectName(_fromUtf8("filterFrame"))
self.verticalLayout = QtGui.QVBoxLayout(self.filterFrame)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.fselectComboBox = QtGui.QComboBox(self.filterFrame)
self.fselectComboBox.setEnabled(True)
self.fselectComboBox.setObjectName(_fromUtf8("fselectComboBox"))
self.fselectComboBox.addItem(_fromUtf8(""))
self.fselectComboBox.addItem(_fromUtf8(""))
self.verticalLayout.addWidget(self.fselectComboBox)
self.filterTypeComboBox = QtGui.QComboBox(self.filterFrame)
self.filterTypeComboBox.setObjectName(_fromUtf8("filterTypeComboBox"))
self.filterTypeComboBox.addItem(_fromUtf8(""))
self.filterTypeComboBox.addItem(_fromUtf8(""))
self.filterTypeComboBox.addItem(_fromUtf8(""))
self.filterTypeComboBox.addItem(_fromUtf8(""))
self.filterTypeComboBox.addItem(_fromUtf8(""))
self.filterTypeComboBox.addItem(_fromUtf8(""))
self.filterTypeComboBox.addItem(_fromUtf8(""))
self.filterTypeComboBox.addItem(_fromUtf8(""))
self.verticalLayout.addWidget(self.filterTypeComboBox)
self.iirfilterBandComboBox = QtGui.QComboBox(self.filterFrame)
self.iirfilterBandComboBox.setObjectName(_fromUtf8("iirfilterBandComboBox"))
self.iirfilterBandComboBox.addItem(_fromUtf8(""))
self.iirfilterBandComboBox.addItem(_fromUtf8(""))
self.iirfilterBandComboBox.addItem(_fromUtf8(""))
self.iirfilterBandComboBox.addItem(_fromUtf8(""))
self.verticalLayout.addWidget(self.iirfilterBandComboBox)
self.adComboBox = QtGui.QComboBox(self.filterFrame)
self.adComboBox.setObjectName(_fromUtf8("adComboBox"))
self.adComboBox.addItem(_fromUtf8(""))
self.adComboBox.addItem(_fromUtf8(""))
self.verticalLayout.addWidget(self.adComboBox)
self.filterDesignTypeComboBox = QtGui.QComboBox(self.filterFrame)
self.filterDesignTypeComboBox.setObjectName(_fromUtf8("filterDesignTypeComboBox"))
self.filterDesignTypeComboBox.addItem(_fromUtf8(""))
self.filterDesignTypeComboBox.addItem(_fromUtf8(""))
self.filterDesignTypeComboBox.addItem(_fromUtf8(""))
self.filterDesignTypeComboBox.addItem(_fromUtf8(""))
self.filterDesignTypeComboBox.addItem(_fromUtf8(""))
self.filterDesignTypeComboBox.addItem(_fromUtf8(""))
self.filterDesignTypeComboBox.addItem(_fromUtf8(""))
self.verticalLayout.addWidget(self.filterDesignTypeComboBox)
self.iirfilterTypeComboBox = QtGui.QComboBox(self.filterFrame)
self.iirfilterTypeComboBox.setObjectName(_fromUtf8("iirfilterTypeComboBox"))
self.iirfilterTypeComboBox.addItem(_fromUtf8(""))
self.iirfilterTypeComboBox.addItem(_fromUtf8(""))
self.iirfilterTypeComboBox.addItem(_fromUtf8(""))
self.iirfilterTypeComboBox.addItem(_fromUtf8(""))
self.iirfilterTypeComboBox.addItem(_fromUtf8(""))
self.verticalLayout.addWidget(self.iirfilterTypeComboBox)
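# Parameters common to every design: sample rate and filter gain.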
self.globalParamsBox = QtGui.QGroupBox(self.filterFrame)
self.globalParamsBox.setTitle(_fromUtf8(""))
self.globalParamsBox.setObjectName(_fromUtf8("globalParamsBox"))
self.formLayout_12 = QtGui.QFormLayout(self.globalParamsBox)
self.formLayout_12.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_12.setObjectName(_fromUtf8("formLayout_12"))
self.sampleRateLabel = QtGui.QLabel(self.globalParamsBox)
self.sampleRateLabel.setMaximumSize(QtCore.QSize(16777215, 30))
self.sampleRateLabel.setObjectName(_fromUtf8("sampleRateLabel"))
self.formLayout_12.setWidget(0, QtGui.QFormLayout.LabelRole, self.sampleRateLabel)
self.sampleRateEdit = QtGui.QLineEdit(self.globalParamsBox)
self.sampleRateEdit.setMaximumSize(QtCore.QSize(16777215, 30))
self.sampleRateEdit.setObjectName(_fromUtf8("sampleRateEdit"))
self.formLayout_12.setWidget(0, QtGui.QFormLayout.FieldRole, self.sampleRateEdit)
self.filterGainLabel = QtGui.QLabel(self.globalParamsBox)
self.filterGainLabel.setObjectName(_fromUtf8("filterGainLabel"))
self.formLayout_12.setWidget(1, QtGui.QFormLayout.LabelRole, self.filterGainLabel)
self.filterGainEdit = QtGui.QLineEdit(self.globalParamsBox)
self.filterGainEdit.setObjectName(_fromUtf8("filterGainEdit"))
self.formLayout_12.setWidget(1, QtGui.QFormLayout.FieldRole, self.filterGainEdit)
self.verticalLayout.addWidget(self.globalParamsBox)
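# Stacked widget with one parameter page per filter type; the application code presumably raises the page matching the combo-box selection.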
self.filterTypeWidget = QtGui.QStackedWidget(self.filterFrame)
self.filterTypeWidget.setObjectName(_fromUtf8("filterTypeWidget"))
self.firlpfPage = QtGui.QWidget()
self.firlpfPage.setObjectName(_fromUtf8("firlpfPage"))
self.formLayout = QtGui.QFormLayout(self.firlpfPage)
self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.endofLpfPassBandLabel = QtGui.QLabel(self.firlpfPage)
self.endofLpfPassBandLabel.setObjectName(_fromUtf8("endofLpfPassBandLabel"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.endofLpfPassBandLabel)
self.endofLpfPassBandEdit = QtGui.QLineEdit(self.firlpfPage)
self.endofLpfPassBandEdit.setObjectName(_fromUtf8("endofLpfPassBandEdit"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.endofLpfPassBandEdit)
self.startofLpfStopBandLabel = QtGui.QLabel(self.firlpfPage)
self.startofLpfStopBandLabel.setObjectName(_fromUtf8("startofLpfStopBandLabel"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.startofLpfStopBandLabel)
self.startofLpfStopBandEdit = QtGui.QLineEdit(self.firlpfPage)
self.startofLpfStopBandEdit.setObjectName(_fromUtf8("startofLpfStopBandEdit"))
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.startofLpfStopBandEdit)
self.lpfStopBandAttenLabel = QtGui.QLabel(self.firlpfPage)
self.lpfStopBandAttenLabel.setObjectName(_fromUtf8("lpfStopBandAttenLabel"))
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.lpfStopBandAttenLabel)
self.lpfStopBandAttenEdit = QtGui.QLineEdit(self.firlpfPage)
self.lpfStopBandAttenEdit.setObjectName(_fromUtf8("lpfStopBandAttenEdit"))
self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.lpfStopBandAttenEdit)
self.lpfPassBandRippleLabel = QtGui.QLabel(self.firlpfPage)
self.lpfPassBandRippleLabel.setObjectName(_fromUtf8("lpfPassBandRippleLabel"))
self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.lpfPassBandRippleLabel)
self.lpfPassBandRippleEdit = QtGui.QLineEdit(self.firlpfPage)
self.lpfPassBandRippleEdit.setObjectName(_fromUtf8("lpfPassBandRippleEdit"))
self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.lpfPassBandRippleEdit)
self.filterTypeWidget.addWidget(self.firlpfPage)
self.firbpfPage = QtGui.QWidget()
self.firbpfPage.setObjectName(_fromUtf8("firbpfPage"))
self.formLayout_2 = QtGui.QFormLayout(self.firbpfPage)
self.formLayout_2.setObjectName(_fromUtf8("formLayout_2"))
self.startofBpfPassBandLabel = QtGui.QLabel(self.firbpfPage)
self.startofBpfPassBandLabel.setObjectName(_fromUtf8("startofBpfPassBandLabel"))
self.formLayout_2.setWidget(0, QtGui.QFormLayout.LabelRole, self.startofBpfPassBandLabel)
self.startofBpfPassBandEdit = QtGui.QLineEdit(self.firbpfPage)
self.startofBpfPassBandEdit.setObjectName(_fromUtf8("startofBpfPassBandEdit"))
self.formLayout_2.setWidget(0, QtGui.QFormLayout.FieldRole, self.startofBpfPassBandEdit)
self.endofBpfPassBandLabel = QtGui.QLabel(self.firbpfPage)
self.endofBpfPassBandLabel.setObjectName(_fromUtf8("endofBpfPassBandLabel"))
self.formLayout_2.setWidget(1, QtGui.QFormLayout.LabelRole, self.endofBpfPassBandLabel)
self.endofBpfPassBandEdit = QtGui.QLineEdit(self.firbpfPage)
self.endofBpfPassBandEdit.setObjectName(_fromUtf8("endofBpfPassBandEdit"))
self.formLayout_2.setWidget(1, QtGui.QFormLayout.FieldRole, self.endofBpfPassBandEdit)
self.bpfStopBandAttenEdit = QtGui.QLineEdit(self.firbpfPage)
self.bpfStopBandAttenEdit.setObjectName(_fromUtf8("bpfStopBandAttenEdit"))
self.formLayout_2.setWidget(3, QtGui.QFormLayout.FieldRole, self.bpfStopBandAttenEdit)
self.bpfStopBandAttenLabel = QtGui.QLabel(self.firbpfPage)
self.bpfStopBandAttenLabel.setObjectName(_fromUtf8("bpfStopBandAttenLabel"))
self.formLayout_2.setWidget(3, QtGui.QFormLayout.LabelRole, self.bpfStopBandAttenLabel)
self.bpfTransitionLabel = QtGui.QLabel(self.firbpfPage)
self.bpfTransitionLabel.setObjectName(_fromUtf8("bpfTransitionLabel"))
self.formLayout_2.setWidget(2, QtGui.QFormLayout.LabelRole, self.bpfTransitionLabel)
self.bpfTransitionEdit = QtGui.QLineEdit(self.firbpfPage)
self.bpfTransitionEdit.setObjectName(_fromUtf8("bpfTransitionEdit"))
self.formLayout_2.setWidget(2, QtGui.QFormLayout.FieldRole, self.bpfTransitionEdit)
self.bpfPassBandRippleEdit = QtGui.QLineEdit(self.firbpfPage)
self.bpfPassBandRippleEdit.setObjectName(_fromUtf8("bpfPassBandRippleEdit"))
self.formLayout_2.setWidget(4, QtGui.QFormLayout.FieldRole, self.bpfPassBandRippleEdit)
self.bpfPassBandRippleLabel = QtGui.QLabel(self.firbpfPage)
self.bpfPassBandRippleLabel.setObjectName(_fromUtf8("bpfPassBandRippleLabel"))
self.formLayout_2.setWidget(4, QtGui.QFormLayout.LabelRole, self.bpfPassBandRippleLabel)
self.filterTypeWidget.addWidget(self.firbpfPage)
self.firbnfPage = QtGui.QWidget()
self.firbnfPage.setObjectName(_fromUtf8("firbnfPage"))
self.formLayout_5 = QtGui.QFormLayout(self.firbnfPage)
self.formLayout_5.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_5.setObjectName(_fromUtf8("formLayout_5"))
self.startofBnfStopBandLabel = QtGui.QLabel(self.firbnfPage)
self.startofBnfStopBandLabel.setObjectName(_fromUtf8("startofBnfStopBandLabel"))
self.formLayout_5.setWidget(0, QtGui.QFormLayout.LabelRole, self.startofBnfStopBandLabel)
self.startofBnfStopBandEdit = QtGui.QLineEdit(self.firbnfPage)
self.startofBnfStopBandEdit.setObjectName(_fromUtf8("startofBnfStopBandEdit"))
self.formLayout_5.setWidget(0, QtGui.QFormLayout.FieldRole, self.startofBnfStopBandEdit)
self.endofBnfStopBandLabel = QtGui.QLabel(self.firbnfPage)
self.endofBnfStopBandLabel.setObjectName(_fromUtf8("endofBnfStopBandLabel"))
self.formLayout_5.setWidget(1, QtGui.QFormLayout.LabelRole, self.endofBnfStopBandLabel)
self.endofBnfStopBandEdit = QtGui.QLineEdit(self.firbnfPage)
self.endofBnfStopBandEdit.setObjectName(_fromUtf8("endofBnfStopBandEdit"))
self.formLayout_5.setWidget(1, QtGui.QFormLayout.FieldRole, self.endofBnfStopBandEdit)
self.bnfTransitionLabel = QtGui.QLabel(self.firbnfPage)
self.bnfTransitionLabel.setObjectName(_fromUtf8("bnfTransitionLabel"))
self.formLayout_5.setWidget(2, QtGui.QFormLayout.LabelRole, self.bnfTransitionLabel)
self.bnfTransitionEdit = QtGui.QLineEdit(self.firbnfPage)
self.bnfTransitionEdit.setObjectName(_fromUtf8("bnfTransitionEdit"))
self.formLayout_5.setWidget(2, QtGui.QFormLayout.FieldRole, self.bnfTransitionEdit)
self.bnfStopBandAttenLabel = QtGui.QLabel(self.firbnfPage)
self.bnfStopBandAttenLabel.setObjectName(_fromUtf8("bnfStopBandAttenLabel"))
self.formLayout_5.setWidget(3, QtGui.QFormLayout.LabelRole, self.bnfStopBandAttenLabel)
self.bnfStopBandAttenEdit = QtGui.QLineEdit(self.firbnfPage)
self.bnfStopBandAttenEdit.setObjectName(_fromUtf8("bnfStopBandAttenEdit"))
self.formLayout_5.setWidget(3, QtGui.QFormLayout.FieldRole, self.bnfStopBandAttenEdit)
self.bnfPassBandRippleLabel = QtGui.QLabel(self.firbnfPage)
self.bnfPassBandRippleLabel.setObjectName(_fromUtf8("bnfPassBandRippleLabel"))
self.formLayout_5.setWidget(4, QtGui.QFormLayout.LabelRole, self.bnfPassBandRippleLabel)
self.bnfPassBandRippleEdit = QtGui.QLineEdit(self.firbnfPage)
self.bnfPassBandRippleEdit.setObjectName(_fromUtf8("bnfPassBandRippleEdit"))
self.formLayout_5.setWidget(4, QtGui.QFormLayout.FieldRole, self.bnfPassBandRippleEdit)
self.filterTypeWidget.addWidget(self.firbnfPage)
self.firhpfPage = QtGui.QWidget()
self.firhpfPage.setObjectName(_fromUtf8("firhpfPage"))
self.formLayout_3 = QtGui.QFormLayout(self.firhpfPage)
self.formLayout_3.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_3.setObjectName(_fromUtf8("formLayout_3"))
self.endofHpfStopBandLabel = QtGui.QLabel(self.firhpfPage)
self.endofHpfStopBandLabel.setObjectName(_fromUtf8("endofHpfStopBandLabel"))
self.formLayout_3.setWidget(0, QtGui.QFormLayout.LabelRole, self.endofHpfStopBandLabel)
self.endofHpfStopBandEdit = QtGui.QLineEdit(self.firhpfPage)
self.endofHpfStopBandEdit.setObjectName(_fromUtf8("endofHpfStopBandEdit"))
self.formLayout_3.setWidget(0, QtGui.QFormLayout.FieldRole, self.endofHpfStopBandEdit)
self.startofHpfPassBandLabel = QtGui.QLabel(self.firhpfPage)
self.startofHpfPassBandLabel.setObjectName(_fromUtf8("startofHpfPassBandLabel"))
self.formLayout_3.setWidget(1, QtGui.QFormLayout.LabelRole, self.startofHpfPassBandLabel)
self.startofHpfPassBandEdit = QtGui.QLineEdit(self.firhpfPage)
self.startofHpfPassBandEdit.setObjectName(_fromUtf8("startofHpfPassBandEdit"))
self.formLayout_3.setWidget(1, QtGui.QFormLayout.FieldRole, self.startofHpfPassBandEdit)
self.hpfStopBandAttenLabel = QtGui.QLabel(self.firhpfPage)
self.hpfStopBandAttenLabel.setObjectName(_fromUtf8("hpfStopBandAttenLabel"))
self.formLayout_3.setWidget(2, QtGui.QFormLayout.LabelRole, self.hpfStopBandAttenLabel)
self.hpfStopBandAttenEdit = QtGui.QLineEdit(self.firhpfPage)
self.hpfStopBandAttenEdit.setObjectName(_fromUtf8("hpfStopBandAttenEdit"))
self.formLayout_3.setWidget(2, QtGui.QFormLayout.FieldRole, self.hpfStopBandAttenEdit)
self.hpfPassBandRippleLabel = QtGui.QLabel(self.firhpfPage)
self.hpfPassBandRippleLabel.setObjectName(_fromUtf8("hpfPassBandRippleLabel"))
self.formLayout_3.setWidget(3, QtGui.QFormLayout.LabelRole, self.hpfPassBandRippleLabel)
self.hpfPassBandRippleEdit = QtGui.QLineEdit(self.firhpfPage)
self.hpfPassBandRippleEdit.setObjectName(_fromUtf8("hpfPassBandRippleEdit"))
self.formLayout_3.setWidget(3, QtGui.QFormLayout.FieldRole, self.hpfPassBandRippleEdit)
self.filterTypeWidget.addWidget(self.firhpfPage)
self.rrcPage = QtGui.QWidget()
self.rrcPage.setObjectName(_fromUtf8("rrcPage"))
self.formLayout_6 = QtGui.QFormLayout(self.rrcPage)
self.formLayout_6.setObjectName(_fromUtf8("formLayout_6"))
self.rrcSymbolRateLabel = QtGui.QLabel(self.rrcPage)
self.rrcSymbolRateLabel.setObjectName(_fromUtf8("rrcSymbolRateLabel"))
self.formLayout_6.setWidget(0, QtGui.QFormLayout.LabelRole, self.rrcSymbolRateLabel)
self.rrcAlphaLabel = QtGui.QLabel(self.rrcPage)
self.rrcAlphaLabel.setObjectName(_fromUtf8("rrcAlphaLabel"))
self.formLayout_6.setWidget(1, QtGui.QFormLayout.LabelRole, self.rrcAlphaLabel)
self.rrcNumTapsLabel = QtGui.QLabel(self.rrcPage)
self.rrcNumTapsLabel.setObjectName(_fromUtf8("rrcNumTapsLabel"))
self.formLayout_6.setWidget(2, QtGui.QFormLayout.LabelRole, self.rrcNumTapsLabel)
self.rrcSymbolRateEdit = QtGui.QLineEdit(self.rrcPage)
self.rrcSymbolRateEdit.setObjectName(_fromUtf8("rrcSymbolRateEdit"))
self.formLayout_6.setWidget(0, QtGui.QFormLayout.FieldRole, self.rrcSymbolRateEdit)
self.rrcAlphaEdit = QtGui.QLineEdit(self.rrcPage)
self.rrcAlphaEdit.setObjectName(_fromUtf8("rrcAlphaEdit"))
self.formLayout_6.setWidget(1, QtGui.QFormLayout.FieldRole, self.rrcAlphaEdit)
self.rrcNumTapsEdit = QtGui.QLineEdit(self.rrcPage)
self.rrcNumTapsEdit.setObjectName(_fromUtf8("rrcNumTapsEdit"))
self.formLayout_6.setWidget(2, QtGui.QFormLayout.FieldRole, self.rrcNumTapsEdit)
self.filterTypeWidget.addWidget(self.rrcPage)
self.gausPage = QtGui.QWidget()
self.gausPage.setObjectName(_fromUtf8("gausPage"))
self.formLayout_7 = QtGui.QFormLayout(self.gausPage)
self.formLayout_7.setObjectName(_fromUtf8("formLayout_7"))
self.gausSymbolRateLabel = QtGui.QLabel(self.gausPage)
self.gausSymbolRateLabel.setObjectName(_fromUtf8("gausSymbolRateLabel"))
self.formLayout_7.setWidget(0, QtGui.QFormLayout.LabelRole, self.gausSymbolRateLabel)
self.gausSymbolRateEdit = QtGui.QLineEdit(self.gausPage)
self.gausSymbolRateEdit.setObjectName(_fromUtf8("gausSymbolRateEdit"))
self.formLayout_7.setWidget(0, QtGui.QFormLayout.FieldRole, self.gausSymbolRateEdit)
self.gausBTLabel = QtGui.QLabel(self.gausPage)
self.gausBTLabel.setObjectName(_fromUtf8("gausBTLabel"))
self.formLayout_7.setWidget(1, QtGui.QFormLayout.LabelRole, self.gausBTLabel)
self.gausBTEdit = QtGui.QLineEdit(self.gausPage)
self.gausBTEdit.setObjectName(_fromUtf8("gausBTEdit"))
self.formLayout_7.setWidget(1, QtGui.QFormLayout.FieldRole, self.gausBTEdit)
self.gausNumTapsLabel = QtGui.QLabel(self.gausPage)
self.gausNumTapsLabel.setObjectName(_fromUtf8("gausNumTapsLabel"))
self.formLayout_7.setWidget(2, QtGui.QFormLayout.LabelRole, self.gausNumTapsLabel)
self.gausNumTapsEdit = QtGui.QLineEdit(self.gausPage)
self.gausNumTapsEdit.setObjectName(_fromUtf8("gausNumTapsEdit"))
self.formLayout_7.setWidget(2, QtGui.QFormLayout.FieldRole, self.gausNumTapsEdit)
self.filterTypeWidget.addWidget(self.gausPage)
self.iirlpfPage = QtGui.QWidget()
self.iirlpfPage.setObjectName(_fromUtf8("iirlpfPage"))
self.formLayout_15 = QtGui.QFormLayout(self.iirlpfPage)
self.formLayout_15.setObjectName(_fromUtf8("formLayout_15"))
self.iirendofLpfPassBandLabel = QtGui.QLabel(self.iirlpfPage)
self.iirendofLpfPassBandLabel.setObjectName(_fromUtf8("iirendofLpfPassBandLabel"))
self.formLayout_15.setWidget(0, QtGui.QFormLayout.LabelRole, self.iirendofLpfPassBandLabel)
self.iirendofLpfPassBandEdit = QtGui.QLineEdit(self.iirlpfPage)
self.iirendofLpfPassBandEdit.setObjectName(_fromUtf8("iirendofLpfPassBandEdit"))
self.formLayout_15.setWidget(0, QtGui.QFormLayout.FieldRole, self.iirendofLpfPassBandEdit)
self.iirstartofLpfStopBandLabel = QtGui.QLabel(self.iirlpfPage)
self.iirstartofLpfStopBandLabel.setObjectName(_fromUtf8("iirstartofLpfStopBandLabel"))
self.formLayout_15.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirstartofLpfStopBandLabel)
self.iirstartofLpfStopBandEdit = QtGui.QLineEdit(self.iirlpfPage)
self.iirstartofLpfStopBandEdit.setObjectName(_fromUtf8("iirstartofLpfStopBandEdit"))
self.formLayout_15.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirstartofLpfStopBandEdit)
self.iirLpfPassBandAttenLabel = QtGui.QLabel(self.iirlpfPage)
self.iirLpfPassBandAttenLabel.setObjectName(_fromUtf8("iirLpfPassBandAttenLabel"))
self.formLayout_15.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirLpfPassBandAttenLabel)
self.iirLpfPassBandAttenEdit = QtGui.QLineEdit(self.iirlpfPage)
self.iirLpfPassBandAttenEdit.setObjectName(_fromUtf8("iirLpfPassBandAttenEdit"))
self.formLayout_15.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirLpfPassBandAttenEdit)
self.iirLpfStopBandRippleLabel = QtGui.QLabel(self.iirlpfPage)
self.iirLpfStopBandRippleLabel.setObjectName(_fromUtf8("iirLpfStopBandRippleLabel"))
self.formLayout_15.setWidget(3, QtGui.QFormLayout.LabelRole, self.iirLpfStopBandRippleLabel)
self.iirLpfStopBandRippleEdit = QtGui.QLineEdit(self.iirlpfPage)
self.iirLpfStopBandRippleEdit.setObjectName(_fromUtf8("iirLpfStopBandRippleEdit"))
self.formLayout_15.setWidget(3, QtGui.QFormLayout.FieldRole, self.iirLpfStopBandRippleEdit)
self.filterTypeWidget.addWidget(self.iirlpfPage)
self.iirhpfPage = QtGui.QWidget()
self.iirhpfPage.setObjectName(_fromUtf8("iirhpfPage"))
self.formLayout_9 = QtGui.QFormLayout(self.iirhpfPage)
self.formLayout_9.setObjectName(_fromUtf8("formLayout_9"))
self.iirendofHpfStopBandLabel = QtGui.QLabel(self.iirhpfPage)
self.iirendofHpfStopBandLabel.setObjectName(_fromUtf8("iirendofHpfStopBandLabel"))
self.formLayout_9.setWidget(0, QtGui.QFormLayout.LabelRole, self.iirendofHpfStopBandLabel)
self.iirendofHpfStopBandEdit = QtGui.QLineEdit(self.iirhpfPage)
self.iirendofHpfStopBandEdit.setObjectName(_fromUtf8("iirendofHpfStopBandEdit"))
self.formLayout_9.setWidget(0, QtGui.QFormLayout.FieldRole, self.iirendofHpfStopBandEdit)
self.iirstartofHpfPassBandLabel = QtGui.QLabel(self.iirhpfPage)
self.iirstartofHpfPassBandLabel.setObjectName(_fromUtf8("iirstartofHpfPassBandLabel"))
self.formLayout_9.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirstartofHpfPassBandLabel)
self.iirstartofHpfPassBandEdit = QtGui.QLineEdit(self.iirhpfPage)
self.iirstartofHpfPassBandEdit.setObjectName(_fromUtf8("iirstartofHpfPassBandEdit"))
self.formLayout_9.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirstartofHpfPassBandEdit)
self.iirHpfPassBandAttenLabel = QtGui.QLabel(self.iirhpfPage)
self.iirHpfPassBandAttenLabel.setObjectName(_fromUtf8("iirHpfPassBandAttenLabel"))
self.formLayout_9.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirHpfPassBandAttenLabel)
self.iirHpfPassBandAttenEdit = QtGui.QLineEdit(self.iirhpfPage)
self.iirHpfPassBandAttenEdit.setObjectName(_fromUtf8("iirHpfPassBandAttenEdit"))
self.formLayout_9.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirHpfPassBandAttenEdit)
self.iirHpfStopBandRippleLabel = QtGui.QLabel(self.iirhpfPage)
self.iirHpfStopBandRippleLabel.setObjectName(_fromUtf8("iirHpfStopBandRippleLabel"))
self.formLayout_9.setWidget(3, QtGui.QFormLayout.LabelRole, self.iirHpfStopBandRippleLabel)
self.iirHpfStopBandRippleEdit = QtGui.QLineEdit(self.iirhpfPage)
self.iirHpfStopBandRippleEdit.setObjectName(_fromUtf8("iirHpfStopBandRippleEdit"))
self.formLayout_9.setWidget(3, QtGui.QFormLayout.FieldRole, self.iirHpfStopBandRippleEdit)
self.filterTypeWidget.addWidget(self.iirhpfPage)
self.iirbpfPage = QtGui.QWidget()
self.iirbpfPage.setObjectName(_fromUtf8("iirbpfPage"))
self.formLayout_10 = QtGui.QFormLayout(self.iirbpfPage)
self.formLayout_10.setObjectName(_fromUtf8("formLayout_10"))
self.iirendofBpfStopBandLabel1 = QtGui.QLabel(self.iirbpfPage)
self.iirendofBpfStopBandLabel1.setObjectName(_fromUtf8("iirendofBpfStopBandLabel1"))
self.formLayout_10.setWidget(0, QtGui.QFormLayout.LabelRole, self.iirendofBpfStopBandLabel1)
self.iirendofBpfStopBandEdit1 = QtGui.QLineEdit(self.iirbpfPage)
self.iirendofBpfStopBandEdit1.setObjectName(_fromUtf8("iirendofBpfStopBandEdit1"))
self.formLayout_10.setWidget(0, QtGui.QFormLayout.FieldRole, self.iirendofBpfStopBandEdit1)
self.iirstartofBpfPassBandLabel = QtGui.QLabel(self.iirbpfPage)
self.iirstartofBpfPassBandLabel.setObjectName(_fromUtf8("iirstartofBpfPassBandLabel"))
self.formLayout_10.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirstartofBpfPassBandLabel)
self.iirstartofBpfPassBandEdit = QtGui.QLineEdit(self.iirbpfPage)
self.iirstartofBpfPassBandEdit.setObjectName(_fromUtf8("iirstartofBpfPassBandEdit"))
self.formLayout_10.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirstartofBpfPassBandEdit)
self.iirendofBpfPassBandLabel = QtGui.QLabel(self.iirbpfPage)
self.iirendofBpfPassBandLabel.setObjectName(_fromUtf8("iirendofBpfPassBandLabel"))
self.formLayout_10.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirendofBpfPassBandLabel)
self.iirendofBpfPassBandEdit = QtGui.QLineEdit(self.iirbpfPage)
self.iirendofBpfPassBandEdit.setObjectName(_fromUtf8("iirendofBpfPassBandEdit"))
self.formLayout_10.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirendofBpfPassBandEdit)
self.iirstartofBpfStopBandLabel2 = QtGui.QLabel(self.iirbpfPage)
self.iirstartofBpfStopBandLabel2.setObjectName(_fromUtf8("iirstartofBpfStopBandLabel2"))
self.formLayout_10.setWidget(3, QtGui.QFormLayout.LabelRole, self.iirstartofBpfStopBandLabel2)
self.iirstartofBpfStopBandEdit2 = QtGui.QLineEdit(self.iirbpfPage)
self.iirstartofBpfStopBandEdit2.setObjectName(_fromUtf8("iirstartofBpfStopBandEdit2"))
self.formLayout_10.setWidget(3, QtGui.QFormLayout.FieldRole, self.iirstartofBpfStopBandEdit2)
self.iirBpfPassBandAttenLabel = QtGui.QLabel(self.iirbpfPage)
self.iirBpfPassBandAttenLabel.setObjectName(_fromUtf8("iirBpfPassBandAttenLabel"))
self.formLayout_10.setWidget(4, QtGui.QFormLayout.LabelRole, self.iirBpfPassBandAttenLabel)
self.iirBpfPassBandAttenEdit = QtGui.QLineEdit(self.iirbpfPage)
self.iirBpfPassBandAttenEdit.setObjectName(_fromUtf8("iirBpfPassBandAttenEdit"))
self.formLayout_10.setWidget(4, QtGui.QFormLayout.FieldRole, self.iirBpfPassBandAttenEdit)
self.iirBpfStopBandRippleLabel = QtGui.QLabel(self.iirbpfPage)
self.iirBpfStopBandRippleLabel.setObjectName(_fromUtf8("iirBpfStopBandRippleLabel"))
self.formLayout_10.setWidget(5, QtGui.QFormLayout.LabelRole, self.iirBpfStopBandRippleLabel)
self.iirBpfStopBandRippleEdit = QtGui.QLineEdit(self.iirbpfPage)
self.iirBpfStopBandRippleEdit.setObjectName(_fromUtf8("iirBpfStopBandRippleEdit"))
self.formLayout_10.setWidget(5, QtGui.QFormLayout.FieldRole, self.iirBpfStopBandRippleEdit)
self.filterTypeWidget.addWidget(self.iirbpfPage)
self.iirbsfPage = QtGui.QWidget()
self.iirbsfPage.setObjectName(_fromUtf8("iirbsfPage"))
self.formLayout_11 = QtGui.QFormLayout(self.iirbsfPage)
self.formLayout_11.setObjectName(_fromUtf8("formLayout_11"))
self.iirendofBsfPassBandLabel1 = QtGui.QLabel(self.iirbsfPage)
self.iirendofBsfPassBandLabel1.setObjectName(_fromUtf8("iirendofBsfPassBandLabel1"))
self.formLayout_11.setWidget(0, QtGui.QFormLayout.LabelRole, self.iirendofBsfPassBandLabel1)
self.iirendofBsfPassBandEdit1 = QtGui.QLineEdit(self.iirbsfPage)
self.iirendofBsfPassBandEdit1.setObjectName(_fromUtf8("iirendofBsfPassBandEdit1"))
self.formLayout_11.setWidget(0, QtGui.QFormLayout.FieldRole, self.iirendofBsfPassBandEdit1)
self.iirstartofBsfStopBandLabel = QtGui.QLabel(self.iirbsfPage)
self.iirstartofBsfStopBandLabel.setObjectName(_fromUtf8("iirstartofBsfStopBandLabel"))
self.formLayout_11.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirstartofBsfStopBandLabel)
self.iirstartofBsfStopBandEdit = QtGui.QLineEdit(self.iirbsfPage)
self.iirstartofBsfStopBandEdit.setObjectName(_fromUtf8("iirstartofBsfStopBandEdit"))
self.formLayout_11.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirstartofBsfStopBandEdit)
self.iirendofBsfStopBandLabel = QtGui.QLabel(self.iirbsfPage)
self.iirendofBsfStopBandLabel.setObjectName(_fromUtf8("iirendofBsfStopBandLabel"))
self.formLayout_11.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirendofBsfStopBandLabel)
self.iirendofBsfStopBandEdit = QtGui.QLineEdit(self.iirbsfPage)
self.iirendofBsfStopBandEdit.setObjectName(_fromUtf8("iirendofBsfStopBandEdit"))
self.formLayout_11.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirendofBsfStopBandEdit)
self.iirstartofBsfPassBandLabel2 = QtGui.QLabel(self.iirbsfPage)
self.iirstartofBsfPassBandLabel2.setObjectName(_fromUtf8("iirstartofBsfPassBandLabel2"))
self.formLayout_11.setWidget(3, QtGui.QFormLayout.LabelRole, self.iirstartofBsfPassBandLabel2)
self.iirstartofBsfPassBandEdit2 = QtGui.QLineEdit(self.iirbsfPage)
self.iirstartofBsfPassBandEdit2.setObjectName(_fromUtf8("iirstartofBsfPassBandEdit2"))
self.formLayout_11.setWidget(3, QtGui.QFormLayout.FieldRole, self.iirstartofBsfPassBandEdit2)
self.iirBsfPassBandAttenLabel = QtGui.QLabel(self.iirbsfPage)
self.iirBsfPassBandAttenLabel.setObjectName(_fromUtf8("iirBsfPassBandAttenLabel"))
self.formLayout_11.setWidget(4, QtGui.QFormLayout.LabelRole, self.iirBsfPassBandAttenLabel)
self.iirBsfPassBandAttenEdit = QtGui.QLineEdit(self.iirbsfPage)
self.iirBsfPassBandAttenEdit.setObjectName(_fromUtf8("iirBsfPassBandAttenEdit"))
self.formLayout_11.setWidget(4, QtGui.QFormLayout.FieldRole, self.iirBsfPassBandAttenEdit)
self.iirBsfStopBandRippleLabel = QtGui.QLabel(self.iirbsfPage)
self.iirBsfStopBandRippleLabel.setObjectName(_fromUtf8("iirBsfStopBandRippleLabel"))
self.formLayout_11.setWidget(5, QtGui.QFormLayout.LabelRole, self.iirBsfStopBandRippleLabel)
self.iirBsfStopBandRippleEdit = QtGui.QLineEdit(self.iirbsfPage)
self.iirBsfStopBandRippleEdit.setObjectName(_fromUtf8("iirBsfStopBandRippleEdit"))
self.formLayout_11.setWidget(5, QtGui.QFormLayout.FieldRole, self.iirBsfStopBandRippleEdit)
self.filterTypeWidget.addWidget(self.iirbsfPage)
self.iirbesselPage = QtGui.QWidget()
self.iirbesselPage.setObjectName(_fromUtf8("iirbesselPage"))
self.formLayout_13 = QtGui.QFormLayout(self.iirbesselPage)
self.formLayout_13.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_13.setObjectName(_fromUtf8("formLayout_13"))
self.besselordLabel = QtGui.QLabel(self.iirbesselPage)
self.besselordLabel.setObjectName(_fromUtf8("besselordLabel"))
self.formLayout_13.setWidget(0, QtGui.QFormLayout.LabelRole, self.besselordLabel)
self.besselordEdit = QtGui.QLineEdit(self.iirbesselPage)
self.besselordEdit.setObjectName(_fromUtf8("besselordEdit"))
self.formLayout_13.setWidget(0, QtGui.QFormLayout.FieldRole, self.besselordEdit)
self.iirbesselcritLabel1 = QtGui.QLabel(self.iirbesselPage)
self.iirbesselcritLabel1.setObjectName(_fromUtf8("iirbesselcritLabel1"))
self.formLayout_13.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirbesselcritLabel1)
self.iirbesselcritEdit1 = QtGui.QLineEdit(self.iirbesselPage)
self.iirbesselcritEdit1.setObjectName(_fromUtf8("iirbesselcritEdit1"))
self.formLayout_13.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirbesselcritEdit1)
self.iirbesselcritEdit2 = QtGui.QLineEdit(self.iirbesselPage)
self.iirbesselcritEdit2.setObjectName(_fromUtf8("iirbesselcritEdit2"))
self.formLayout_13.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirbesselcritEdit2)
self.iirbesselcritLabel2 = QtGui.QLabel(self.iirbesselPage)
self.iirbesselcritLabel2.setObjectName(_fromUtf8("iirbesselcritLabel2"))
self.formLayout_13.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirbesselcritLabel2)
self.filterTypeWidget.addWidget(self.iirbesselPage)
self.firhbPage = QtGui.QWidget()
self.firhbPage.setObjectName(_fromUtf8("firhbPage"))
self.formLayout_14 = QtGui.QFormLayout(self.firhbPage)
self.formLayout_14.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_14.setObjectName(_fromUtf8("formLayout_14"))
self.firhbordLabel = QtGui.QLabel(self.firhbPage)
self.firhbordLabel.setObjectName(_fromUtf8("firhbordLabel"))
self.formLayout_14.setWidget(0, QtGui.QFormLayout.LabelRole, self.firhbordLabel)
self.firhbordEdit = QtGui.QLineEdit(self.firhbPage)
self.firhbordEdit.setObjectName(_fromUtf8("firhbordEdit"))
self.formLayout_14.setWidget(0, QtGui.QFormLayout.FieldRole, self.firhbordEdit)
self.firhbtrEditLabel2 = QtGui.QLabel(self.firhbPage)
self.firhbtrEditLabel2.setObjectName(_fromUtf8("firhbtrEditLabel2"))
self.formLayout_14.setWidget(2, QtGui.QFormLayout.LabelRole, self.firhbtrEditLabel2)
self.firhbtrEdit = QtGui.QLineEdit(self.firhbPage)
self.firhbtrEdit.setObjectName(_fromUtf8("firhbtrEdit"))
self.formLayout_14.setWidget(2, QtGui.QFormLayout.FieldRole, self.firhbtrEdit)
self.filterTypeWidget.addWidget(self.firhbPage)
self.verticalLayout.addWidget(self.filterTypeWidget)
self.filterPropsBox = QtGui.QGroupBox(self.filterFrame)
self.filterPropsBox.setObjectName(_fromUtf8("filterPropsBox"))
self.formLayout_8 = QtGui.QFormLayout(self.filterPropsBox)
self.formLayout_8.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_8.setObjectName(_fromUtf8("formLayout_8"))
self.nTapsLabel = QtGui.QLabel(self.filterPropsBox)
self.nTapsLabel.setMinimumSize(QtCore.QSize(150, 0))
self.nTapsLabel.setObjectName(_fromUtf8("nTapsLabel"))
self.formLayout_8.setWidget(1, QtGui.QFormLayout.LabelRole, self.nTapsLabel)
self.nTapsEdit = QtGui.QLabel(self.filterPropsBox)
self.nTapsEdit.setMaximumSize(QtCore.QSize(100, 16777215))
self.nTapsEdit.setFrameShape(QtGui.QFrame.Box)
self.nTapsEdit.setFrameShadow(QtGui.QFrame.Raised)
self.nTapsEdit.setText(_fromUtf8(""))
self.nTapsEdit.setObjectName(_fromUtf8("nTapsEdit"))
self.formLayout_8.setWidget(1, QtGui.QFormLayout.FieldRole, self.nTapsEdit)
self.verticalLayout.addWidget(self.filterPropsBox)
self.designButton = QtGui.QPushButton(self.filterFrame)
self.designButton.setMinimumSize(QtCore.QSize(0, 0))
self.designButton.setMaximumSize(QtCore.QSize(200, 16777215))
self.designButton.setAutoDefault(True)
self.designButton.setDefault(True)
self.designButton.setObjectName(_fromUtf8("designButton"))
self.verticalLayout.addWidget(self.designButton)
self.gridLayout.addWidget(self.filterFrame, 0, 0, 1, 1)
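# Menu bar, status bar, and the QAction objects wired into the File, Analysis and Widgets menus below.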
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1128, 19))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menu_File = QtGui.QMenu(self.menubar)
self.menu_File.setObjectName(_fromUtf8("menu_File"))
self.menu_Analysis = QtGui.QMenu(self.menubar)
self.menu_Analysis.setObjectName(_fromUtf8("menu_Analysis"))
self.menuWidgets = QtGui.QMenu(self.menubar)
self.menuWidgets.setGeometry(QtCore.QRect(408, 108, 129, 86))
self.menuWidgets.setObjectName(_fromUtf8("menuWidgets"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.action_exit = QtGui.QAction(MainWindow)
self.action_exit.setObjectName(_fromUtf8("action_exit"))
self.action_save = QtGui.QAction(MainWindow)
self.action_save.setObjectName(_fromUtf8("action_save"))
self.action_open = QtGui.QAction(MainWindow)
self.action_open.setObjectName(_fromUtf8("action_open"))
self.actionMagnitude_Response = QtGui.QAction(MainWindow)
self.actionMagnitude_Response.setCheckable(True)
self.actionMagnitude_Response.setChecked(True)
self.actionMagnitude_Response.setObjectName(_fromUtf8("actionMagnitude_Response"))
self.actionPhase_Respone = QtGui.QAction(MainWindow)
self.actionPhase_Respone.setCheckable(True)
self.actionPhase_Respone.setChecked(True)
self.actionPhase_Respone.setObjectName(_fromUtf8("actionPhase_Respone"))
self.actionGroup_Delay = QtGui.QAction(MainWindow)
self.actionGroup_Delay.setCheckable(True)
self.actionGroup_Delay.setChecked(True)
self.actionGroup_Delay.setObjectName(_fromUtf8("actionGroup_Delay"))
self.actionPhase_Delay = QtGui.QAction(MainWindow)
self.actionPhase_Delay.setCheckable(True)
self.actionPhase_Delay.setChecked(True)
self.actionPhase_Delay.setObjectName(_fromUtf8("actionPhase_Delay"))
self.actionImpulse_Response = QtGui.QAction(MainWindow)
self.actionImpulse_Response.setCheckable(True)
self.actionImpulse_Response.setChecked(True)
self.actionImpulse_Response.setObjectName(_fromUtf8("actionImpulse_Response"))
self.actionStep_Response = QtGui.QAction(MainWindow)
self.actionStep_Response.setCheckable(True)
self.actionStep_Response.setChecked(True)
self.actionStep_Response.setObjectName(_fromUtf8("actionStep_Response"))
self.actionPole_Zero_Plot = QtGui.QAction(MainWindow)
self.actionPole_Zero_Plot.setObjectName(_fromUtf8("actionPole_Zero_Plot"))
self.actionGrid = QtGui.QAction(MainWindow)
self.actionGrid.setObjectName(_fromUtf8("actionGrid"))
self.actionPole_Zero_Plot_2 = QtGui.QAction(MainWindow)
self.actionPole_Zero_Plot_2.setCheckable(True)
self.actionPole_Zero_Plot_2.setChecked(True)
self.actionPole_Zero_Plot_2.setObjectName(_fromUtf8("actionPole_Zero_Plot_2"))
self.actionIdeal_Band = QtGui.QAction(MainWindow)
self.actionIdeal_Band.setCheckable(True)
self.actionIdeal_Band.setChecked(False)
self.actionIdeal_Band.setObjectName(_fromUtf8("actionIdeal_Band"))
self.actionGrid_2 = QtGui.QAction(MainWindow)
self.actionGrid_2.setCheckable(True)
self.actionGrid_2.setChecked(False)
self.actionGrid_2.setObjectName(_fromUtf8("actionGrid_2"))
self.actionGrid_3 = QtGui.QAction(MainWindow)
self.actionGrid_3.setObjectName(_fromUtf8("actionGrid_3"))
self.actionTabbed = QtGui.QAction(MainWindow)
self.actionTabbed.setCheckable(True)
self.actionTabbed.setObjectName(_fromUtf8("actionTabbed"))
self.actionOverlay = QtGui.QAction(MainWindow)
self.actionOverlay.setCheckable(True)
self.actionOverlay.setObjectName(_fromUtf8("actionOverlay"))
self.actionResponse_widget = QtGui.QAction(MainWindow)
self.actionResponse_widget.setCheckable(True)
self.actionResponse_widget.setChecked(True)
self.actionResponse_widget.setObjectName(_fromUtf8("actionResponse_widget"))
self.actionSpec_widget = QtGui.QAction(MainWindow)
self.actionSpec_widget.setCheckable(True)
self.actionSpec_widget.setChecked(True)
self.actionSpec_widget.setObjectName(_fromUtf8("actionSpec_widget"))
self.actionQuick_access = QtGui.QAction(MainWindow)
self.actionQuick_access.setCheckable(True)
self.actionQuick_access.setChecked(True)
self.actionQuick_access.setObjectName(_fromUtf8("actionQuick_access"))
self.actionFilter_Coefficients = QtGui.QAction(MainWindow)
self.actionFilter_Coefficients.setCheckable(True)
self.actionFilter_Coefficients.setChecked(True)
self.actionFilter_Coefficients.setObjectName(_fromUtf8("actionFilter_Coefficients"))
self.actionDesign_widget = QtGui.QAction(MainWindow)
self.actionDesign_widget.setCheckable(True)
self.actionDesign_widget.setChecked(True)
self.actionDesign_widget.setObjectName(_fromUtf8("actionDesign_widget"))
self.actionOverlay_2 = QtGui.QAction(MainWindow)
self.actionOverlay_2.setObjectName(_fromUtf8("actionOverlay_2"))
self.actionGridview = QtGui.QAction(MainWindow)
self.actionGridview.setCheckable(True)
self.actionGridview.setObjectName(_fromUtf8("actionGridview"))
self.actionDesign_widget_2 = QtGui.QAction(MainWindow)
self.actionDesign_widget_2.setCheckable(True)
self.actionDesign_widget_2.setObjectName(_fromUtf8("actionDesign_widget_2"))
self.actionQuick_access_2 = QtGui.QAction(MainWindow)
self.actionQuick_access_2.setObjectName(_fromUtf8("actionQuick_access_2"))
self.actionSpec_widget_2 = QtGui.QAction(MainWindow)
self.actionSpec_widget_2.setObjectName(_fromUtf8("actionSpec_widget_2"))
self.actionResponse_widget_2 = QtGui.QAction(MainWindow)
self.actionResponse_widget_2.setObjectName(_fromUtf8("actionResponse_widget_2"))
self.actionDesign_Widget = QtGui.QAction(MainWindow)
self.actionDesign_Widget.setCheckable(True)
self.actionDesign_Widget.setChecked(True)
self.actionDesign_Widget.setObjectName(_fromUtf8("actionDesign_Widget"))
self.actionQuick_Access = QtGui.QAction(MainWindow)
self.actionQuick_Access.setCheckable(True)
self.actionQuick_Access.setChecked(True)
self.actionQuick_Access.setObjectName(_fromUtf8("actionQuick_Access"))
self.actionSpec_Widget = QtGui.QAction(MainWindow)
self.actionSpec_Widget.setCheckable(True)
self.actionSpec_Widget.setChecked(True)
self.actionSpec_Widget.setObjectName(_fromUtf8("actionSpec_Widget"))
self.actionResponse_Widget = QtGui.QAction(MainWindow)
self.actionResponse_Widget.setCheckable(True)
self.actionResponse_Widget.setChecked(True)
self.actionResponse_Widget.setObjectName(_fromUtf8("actionResponse_Widget"))
self.actionTabview_2 = QtGui.QAction(MainWindow)
self.actionTabview_2.setCheckable(True)
self.actionTabview_2.setChecked(True)
self.actionTabview_2.setObjectName(_fromUtf8("actionTabview_2"))
self.actionPlot_select = QtGui.QAction(MainWindow)
self.actionPlot_select.setCheckable(True)
self.actionPlot_select.setChecked(True)
self.actionPlot_select.setObjectName(_fromUtf8("actionPlot_select"))
self.actionBand_Diagram = QtGui.QAction(MainWindow)
self.actionBand_Diagram.setCheckable(True)
self.actionBand_Diagram.setChecked(True)
self.actionBand_Diagram.setObjectName(_fromUtf8("actionBand_Diagram"))
self.actionCheck = QtGui.QAction(MainWindow)
self.actionCheck.setObjectName(_fromUtf8("actionCheck"))
self.actionPlot_FFT_points = QtGui.QAction(MainWindow)
self.actionPlot_FFT_points.setObjectName(_fromUtf8("actionPlot_FFT_points"))
self.menu_File.addAction(self.action_open)
self.menu_File.addAction(self.action_save)
self.menu_File.addAction(self.action_exit)
self.menu_Analysis.addSeparator()
self.menu_Analysis.addAction(self.actionMagnitude_Response)
self.menu_Analysis.addAction(self.actionPhase_Respone)
self.menu_Analysis.addAction(self.actionGroup_Delay)
self.menu_Analysis.addAction(self.actionPhase_Delay)
self.menu_Analysis.addAction(self.actionImpulse_Response)
self.menu_Analysis.addAction(self.actionStep_Response)
self.menu_Analysis.addAction(self.actionGrid_2)
self.menu_Analysis.addAction(self.actionFilter_Coefficients)
self.menu_Analysis.addAction(self.actionIdeal_Band)
self.menu_Analysis.addSeparator()
self.menu_Analysis.addAction(self.actionPole_Zero_Plot_2)
self.menu_Analysis.addAction(self.actionBand_Diagram)
self.menu_Analysis.addSeparator()
self.menu_Analysis.addAction(self.actionDesign_Widget)
self.menu_Analysis.addAction(self.actionQuick_Access)
self.menu_Analysis.addAction(self.actionSpec_Widget)
self.menu_Analysis.addAction(self.actionResponse_Widget)
self.menuWidgets.addAction(self.actionGridview)
self.menuWidgets.addAction(self.actionPlot_select)
self.menubar.addAction(self.menu_File.menuAction())
self.menubar.addAction(self.menu_Analysis.menuAction())
self.menubar.addAction(self.menuWidgets.menuAction())
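# Final setup: install translated strings, select the initial pages and tabs, and hook up signal/slot connections.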
self.retranslateUi(MainWindow)
self.stackedWindows.setCurrentIndex(0)
self.tabGroup.setCurrentIndex(0)
self.filterspecView.setCurrentIndex(1)
self.mfreqTabgroup.setCurrentIndex(0)
self.mfilterspecView.setCurrentIndex(1)
self.filterTypeWidget.setCurrentIndex(11)
QtCore.QObject.connect(self.action_exit, QtCore.SIGNAL(_fromUtf8("triggered()")), MainWindow.close)
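# New-style equivalent of the connection above: self.action_exit.triggered.connect(MainWindow.close)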
QtCore.QMetaObject.connectSlotsByName(MainWindow)
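# Tab order runs from the type combo boxes through the FIR parameter pages and ends at the Design button.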
MainWindow.setTabOrder(self.filterTypeComboBox, self.filterDesignTypeComboBox)
MainWindow.setTabOrder(self.filterDesignTypeComboBox, self.endofLpfPassBandEdit)
MainWindow.setTabOrder(self.endofLpfPassBandEdit, self.startofLpfStopBandEdit)
MainWindow.setTabOrder(self.startofLpfStopBandEdit, self.lpfStopBandAttenEdit)
MainWindow.setTabOrder(self.lpfStopBandAttenEdit, self.lpfPassBandRippleEdit)
MainWindow.setTabOrder(self.lpfPassBandRippleEdit, self.startofBpfPassBandEdit)
MainWindow.setTabOrder(self.startofBpfPassBandEdit, self.endofBpfPassBandEdit)
MainWindow.setTabOrder(self.endofBpfPassBandEdit, self.bpfTransitionEdit)
MainWindow.setTabOrder(self.bpfTransitionEdit, self.bpfStopBandAttenEdit)
MainWindow.setTabOrder(self.bpfStopBandAttenEdit, self.bpfPassBandRippleEdit)
MainWindow.setTabOrder(self.bpfPassBandRippleEdit, self.startofBnfStopBandEdit)
MainWindow.setTabOrder(self.startofBnfStopBandEdit, self.endofBnfStopBandEdit)
MainWindow.setTabOrder(self.endofBnfStopBandEdit, self.bnfTransitionEdit)
MainWindow.setTabOrder(self.bnfTransitionEdit, self.bnfStopBandAttenEdit)
MainWindow.setTabOrder(self.bnfStopBandAttenEdit, self.bnfPassBandRippleEdit)
MainWindow.setTabOrder(self.bnfPassBandRippleEdit, self.endofHpfStopBandEdit)
MainWindow.setTabOrder(self.endofHpfStopBandEdit, self.startofHpfPassBandEdit)
MainWindow.setTabOrder(self.startofHpfPassBandEdit, self.hpfStopBandAttenEdit)
MainWindow.setTabOrder(self.hpfStopBandAttenEdit, self.hpfPassBandRippleEdit)
MainWindow.setTabOrder(self.hpfPassBandRippleEdit, self.rrcSymbolRateEdit)
MainWindow.setTabOrder(self.rrcSymbolRateEdit, self.rrcAlphaEdit)
MainWindow.setTabOrder(self.rrcAlphaEdit, self.rrcNumTapsEdit)
MainWindow.setTabOrder(self.rrcNumTapsEdit, self.gausSymbolRateEdit)
MainWindow.setTabOrder(self.gausSymbolRateEdit, self.gausBTEdit)
MainWindow.setTabOrder(self.gausBTEdit, self.gausNumTapsEdit)
MainWindow.setTabOrder(self.gausNumTapsEdit, self.designButton)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "GNU Radio Filter Design Tool", None, QtGui.QApplication.UnicodeUTF8))
self.tabGroup.setTabText(self.tabGroup.indexOf(self.freqTab), QtGui.QApplication.translate("MainWindow", "Magnitude Response", None, QtGui.QApplication.UnicodeUTF8))
self.tabGroup.setTabText(self.tabGroup.indexOf(self.timeTab), QtGui.QApplication.translate("MainWindow", "Filter Taps", None, QtGui.QApplication.UnicodeUTF8))
self.tabGroup.setTabText(self.tabGroup.indexOf(self.phaseTab), QtGui.QApplication.translate("MainWindow", "Phase Response", None, QtGui.QApplication.UnicodeUTF8))
self.tabGroup.setTabText(self.tabGroup.indexOf(self.groupTab), QtGui.QApplication.translate("MainWindow", "Group Delay", None, QtGui.QApplication.UnicodeUTF8))
self.tabGroup.setTabText(self.tabGroup.indexOf(self.fcTab), QtGui.QApplication.translate("MainWindow", "Filter Coefficients", None, QtGui.QApplication.UnicodeUTF8))
self.tabGroup.setTabText(self.tabGroup.indexOf(self.impresTab), QtGui.QApplication.translate("MainWindow", "Impulse Response", None, QtGui.QApplication.UnicodeUTF8))
self.tabGroup.setTabText(self.tabGroup.indexOf(self.stepresTab), QtGui.QApplication.translate("MainWindow", "Step Response", None, QtGui.QApplication.UnicodeUTF8))
self.tabGroup.setTabText(self.tabGroup.indexOf(self.pdelayTab), QtGui.QApplication.translate("MainWindow", "Phase Delay", None, QtGui.QApplication.UnicodeUTF8))
self.filterspecView.setTabText(self.filterspecView.indexOf(self.bandDiagram), QtGui.QApplication.translate("MainWindow", "Band Diagram", None, QtGui.QApplication.UnicodeUTF8))
self.addzeroPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Add zero", None, QtGui.QApplication.UnicodeUTF8))
self.addzeroPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.addpolePush.setToolTip(QtGui.QApplication.translate("MainWindow", "Add pole", None, QtGui.QApplication.UnicodeUTF8))
self.addpolePush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.delPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Delete pole/zero", None, QtGui.QApplication.UnicodeUTF8))
self.delPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.conjPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Conjugate", None, QtGui.QApplication.UnicodeUTF8))
self.conjPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.filterspecView.setTabText(self.filterspecView.indexOf(self.poleZero), QtGui.QApplication.translate("MainWindow", "Pole-Zero Plot", None, QtGui.QApplication.UnicodeUTF8))
self.responseBox.setTitle(QtGui.QApplication.translate("MainWindow", "Filter Responses", None, QtGui.QApplication.UnicodeUTF8))
self.checkMagres.setText(QtGui.QApplication.translate("MainWindow", "Magnitude Response", None, QtGui.QApplication.UnicodeUTF8))
self.checkPhase.setText(QtGui.QApplication.translate("MainWindow", "Phase Response", None, QtGui.QApplication.UnicodeUTF8))
self.checkGdelay.setText(QtGui.QApplication.translate("MainWindow", "Group Delay", None, QtGui.QApplication.UnicodeUTF8))
self.checkPdelay.setText(QtGui.QApplication.translate("MainWindow", "Phase Delay", None, QtGui.QApplication.UnicodeUTF8))
self.checkImpulse.setText(QtGui.QApplication.translate("MainWindow", "Impulse Response", None, QtGui.QApplication.UnicodeUTF8))
self.checkStep.setText(QtGui.QApplication.translate("MainWindow", "Step Response", None, QtGui.QApplication.UnicodeUTF8))
self.checkGrid.setText(QtGui.QApplication.translate("MainWindow", "Grid", None, QtGui.QApplication.UnicodeUTF8))
self.checkFcoeff.setText(QtGui.QApplication.translate("MainWindow", "Filter Coefficients", None, QtGui.QApplication.UnicodeUTF8))
self.checkKeepcur.setText(QtGui.QApplication.translate("MainWindow", "Buffer current plots", None, QtGui.QApplication.UnicodeUTF8))
self.groupSpecs.setTitle(QtGui.QApplication.translate("MainWindow", "Filter Specs", None, QtGui.QApplication.UnicodeUTF8))
self.checkBand.setText(QtGui.QApplication.translate("MainWindow", "Band Diagram", None, QtGui.QApplication.UnicodeUTF8))
self.checkPzplot.setText(QtGui.QApplication.translate("MainWindow", "Pole-Zero Plot", None, QtGui.QApplication.UnicodeUTF8))
self.sysParamsBox.setTitle(QtGui.QApplication.translate("MainWindow", "Plot Parameters", None, QtGui.QApplication.UnicodeUTF8))
self.nfftLabel.setText(QtGui.QApplication.translate("MainWindow", "Num FFT points", None, QtGui.QApplication.UnicodeUTF8))
self.mfmagPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Magnitude Response", None, QtGui.QApplication.UnicodeUTF8))
self.mfmagPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mfphasePush.setToolTip(QtGui.QApplication.translate("MainWindow", "Phase Response", None, QtGui.QApplication.UnicodeUTF8))
self.mfphasePush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mfgpdlyPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Group Delay", None, QtGui.QApplication.UnicodeUTF8))
self.mfgpdlyPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mfphdlyPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Phase Delay", None, QtGui.QApplication.UnicodeUTF8))
self.mfphdlyPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mfoverlayPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Overlay", None, QtGui.QApplication.UnicodeUTF8))
self.mfoverlayPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mfreqTabgroup.setTabText(self.mfreqTabgroup.indexOf(self.mfreqTab), QtGui.QApplication.translate("MainWindow", "Frequency Response", None, QtGui.QApplication.UnicodeUTF8))
self.mttapsPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Filter Taps", None, QtGui.QApplication.UnicodeUTF8))
self.mttapsPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mtstepPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Step Response", None, QtGui.QApplication.UnicodeUTF8))
self.mtstepPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mtimpPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Impulse Response", None, QtGui.QApplication.UnicodeUTF8))
self.mtimpPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mtimeTabgroup.setTabText(self.mtimeTabgroup.indexOf(self.mtimeTab), QtGui.QApplication.translate("MainWindow", "Time Responses", None, QtGui.QApplication.UnicodeUTF8))
self.mfilterspecView.setTabText(self.mfilterspecView.indexOf(self.mbandDiagram), QtGui.QApplication.translate("MainWindow", "Ideal Band", None, QtGui.QApplication.UnicodeUTF8))
self.maddzeroPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Add zero", None, QtGui.QApplication.UnicodeUTF8))
self.maddzeroPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.maddpolePush.setToolTip(QtGui.QApplication.translate("MainWindow", "Add pole", None, QtGui.QApplication.UnicodeUTF8))
self.maddpolePush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mdelPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Delete pole/zero", None, QtGui.QApplication.UnicodeUTF8))
self.mdelPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mconjPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Conjugate", None, QtGui.QApplication.UnicodeUTF8))
self.mconjPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8))
self.mfilterspecView.setTabText(self.mfilterspecView.indexOf(self.mpoleZero), QtGui.QApplication.translate("MainWindow", "Pole-Zero Plot", None, QtGui.QApplication.UnicodeUTF8))
self.mfilterspecView.setTabText(self.mfilterspecView.indexOf(self.mfcTab), QtGui.QApplication.translate("MainWindow", "Filter Coefficients", None, QtGui.QApplication.UnicodeUTF8))
self.fselectComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "FIR", None, QtGui.QApplication.UnicodeUTF8))
self.fselectComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "IIR(scipy)", None, QtGui.QApplication.UnicodeUTF8))
self.filterTypeComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Low Pass", None, QtGui.QApplication.UnicodeUTF8))
self.filterTypeComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "High Pass", None, QtGui.QApplication.UnicodeUTF8))
self.filterTypeComboBox.setItemText(2, QtGui.QApplication.translate("MainWindow", "Band Pass", None, QtGui.QApplication.UnicodeUTF8))
self.filterTypeComboBox.setItemText(3, QtGui.QApplication.translate("MainWindow", "Complex Band Pass", None, QtGui.QApplication.UnicodeUTF8))
self.filterTypeComboBox.setItemText(4, QtGui.QApplication.translate("MainWindow", "Band Notch", None, QtGui.QApplication.UnicodeUTF8))
self.filterTypeComboBox.setItemText(5, QtGui.QApplication.translate("MainWindow", "Root Raised Cosine", None, QtGui.QApplication.UnicodeUTF8))
self.filterTypeComboBox.setItemText(6, QtGui.QApplication.translate("MainWindow", "Gaussian", None, QtGui.QApplication.UnicodeUTF8))
self.filterTypeComboBox.setItemText(7, QtGui.QApplication.translate("MainWindow", "Half Band", None, QtGui.QApplication.UnicodeUTF8))
self.iirfilterBandComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Low Pass", None, QtGui.QApplication.UnicodeUTF8))
self.iirfilterBandComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "Band Pass", None, QtGui.QApplication.UnicodeUTF8))
self.iirfilterBandComboBox.setItemText(2, QtGui.QApplication.translate("MainWindow", "Band Stop", None, QtGui.QApplication.UnicodeUTF8))
self.iirfilterBandComboBox.setItemText(3, QtGui.QApplication.translate("MainWindow", "High Pass", None, QtGui.QApplication.UnicodeUTF8))
self.adComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Digital (normalized 0-1)", None, QtGui.QApplication.UnicodeUTF8))
self.adComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "Analog (rad/second)", None, QtGui.QApplication.UnicodeUTF8))
self.filterDesignTypeComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Hamming Window", None, QtGui.QApplication.UnicodeUTF8))
self.filterDesignTypeComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "Hann Window", None, QtGui.QApplication.UnicodeUTF8))
self.filterDesignTypeComboBox.setItemText(2, QtGui.QApplication.translate("MainWindow", "Blackman Window", None, QtGui.QApplication.UnicodeUTF8))
self.filterDesignTypeComboBox.setItemText(3, QtGui.QApplication.translate("MainWindow", "Rectangular Window", None, QtGui.QApplication.UnicodeUTF8))
self.filterDesignTypeComboBox.setItemText(4, QtGui.QApplication.translate("MainWindow", "Kaiser Window", None, QtGui.QApplication.UnicodeUTF8))
self.filterDesignTypeComboBox.setItemText(5, QtGui.QApplication.translate("MainWindow", "Blackman-harris Window", None, QtGui.QApplication.UnicodeUTF8))
self.filterDesignTypeComboBox.setItemText(6, QtGui.QApplication.translate("MainWindow", "Equiripple", None, QtGui.QApplication.UnicodeUTF8))
self.iirfilterTypeComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Elliptic", None, QtGui.QApplication.UnicodeUTF8))
self.iirfilterTypeComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "Butterworth", None, QtGui.QApplication.UnicodeUTF8))
self.iirfilterTypeComboBox.setItemText(2, QtGui.QApplication.translate("MainWindow", "Chebyshev-1", None, QtGui.QApplication.UnicodeUTF8))
self.iirfilterTypeComboBox.setItemText(3, QtGui.QApplication.translate("MainWindow", "Chebyshev-2", None, QtGui.QApplication.UnicodeUTF8))
self.iirfilterTypeComboBox.setItemText(4, QtGui.QApplication.translate("MainWindow", "Bessel", None, QtGui.QApplication.UnicodeUTF8))
self.sampleRateLabel.setText(QtGui.QApplication.translate("MainWindow", "Sample Rate (sps)", None, QtGui.QApplication.UnicodeUTF8))
self.sampleRateEdit.setText(QtGui.QApplication.translate("MainWindow", "320000", None, QtGui.QApplication.UnicodeUTF8))
self.filterGainLabel.setText(QtGui.QApplication.translate("MainWindow", "Filter Gain", None, QtGui.QApplication.UnicodeUTF8))
self.filterGainEdit.setText(QtGui.QApplication.translate("MainWindow", "2", None, QtGui.QApplication.UnicodeUTF8))
self.endofLpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.endofLpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "50000", None, QtGui.QApplication.UnicodeUTF8))
self.startofLpfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.startofLpfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "60000", None, QtGui.QApplication.UnicodeUTF8))
self.lpfStopBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Stop Band Attenuation (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.lpfStopBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "40", None, QtGui.QApplication.UnicodeUTF8))
self.lpfPassBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Pass Band Ripple (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.lpfPassBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8))
self.startofBpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.startofBpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "50000", None, QtGui.QApplication.UnicodeUTF8))
self.endofBpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.endofBpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "80000", None, QtGui.QApplication.UnicodeUTF8))
self.bpfStopBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "40", None, QtGui.QApplication.UnicodeUTF8))
self.bpfStopBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Stop Band Attenuation (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.bpfTransitionLabel.setText(QtGui.QApplication.translate("MainWindow", "Transition Width (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.bpfTransitionEdit.setText(QtGui.QApplication.translate("MainWindow", "10000", None, QtGui.QApplication.UnicodeUTF8))
self.bpfPassBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8))
self.bpfPassBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Pass Band Ripple (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.startofBnfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.startofBnfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "50000", None, QtGui.QApplication.UnicodeUTF8))
self.endofBnfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.endofBnfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "80000", None, QtGui.QApplication.UnicodeUTF8))
self.bnfTransitionLabel.setText(QtGui.QApplication.translate("MainWindow", "Transition Width (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.bnfTransitionEdit.setText(QtGui.QApplication.translate("MainWindow", "10000", None, QtGui.QApplication.UnicodeUTF8))
self.bnfStopBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Stop Band Attenuation (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.bnfStopBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "48", None, QtGui.QApplication.UnicodeUTF8))
self.bnfPassBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Pass Band Ripple (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.bnfPassBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8))
self.endofHpfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.endofHpfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "50000", None, QtGui.QApplication.UnicodeUTF8))
self.startofHpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.startofHpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "55000", None, QtGui.QApplication.UnicodeUTF8))
self.hpfStopBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Stop Band Attenuation (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.hpfStopBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "48", None, QtGui.QApplication.UnicodeUTF8))
self.hpfPassBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Pass Band Ripple (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.hpfPassBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8))
self.rrcSymbolRateLabel.setText(QtGui.QApplication.translate("MainWindow", "Symbol Rate (sps)", None, QtGui.QApplication.UnicodeUTF8))
self.rrcAlphaLabel.setText(QtGui.QApplication.translate("MainWindow", "Roll-off Factor", None, QtGui.QApplication.UnicodeUTF8))
self.rrcNumTapsLabel.setText(QtGui.QApplication.translate("MainWindow", "Number of Taps", None, QtGui.QApplication.UnicodeUTF8))
self.rrcSymbolRateEdit.setText(QtGui.QApplication.translate("MainWindow", "3200", None, QtGui.QApplication.UnicodeUTF8))
self.rrcAlphaEdit.setText(QtGui.QApplication.translate("MainWindow", "15", None, QtGui.QApplication.UnicodeUTF8))
self.rrcNumTapsEdit.setText(QtGui.QApplication.translate("MainWindow", "50", None, QtGui.QApplication.UnicodeUTF8))
self.gausSymbolRateLabel.setText(QtGui.QApplication.translate("MainWindow", "Symbol Rate (sps)", None, QtGui.QApplication.UnicodeUTF8))
self.gausSymbolRateEdit.setText(QtGui.QApplication.translate("MainWindow", "5000", None, QtGui.QApplication.UnicodeUTF8))
self.gausBTLabel.setText(QtGui.QApplication.translate("MainWindow", "Roll-off Factor", None, QtGui.QApplication.UnicodeUTF8))
self.gausBTEdit.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8))
self.gausNumTapsLabel.setText(QtGui.QApplication.translate("MainWindow", "Number of Taps", None, QtGui.QApplication.UnicodeUTF8))
self.gausNumTapsEdit.setText(QtGui.QApplication.translate("MainWindow", "30", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofLpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofLpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.3", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofLpfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band ", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofLpfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8))
self.iirLpfPassBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Max loss in Pass Band (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.iirLpfPassBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8))
self.iirLpfStopBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Min atten in Stop Band (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.iirLpfStopBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "60", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofHpfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofHpfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.3", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofHpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band ", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofHpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8))
self.iirHpfPassBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Max loss in Pass Band (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.iirHpfPassBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8))
self.iirHpfStopBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Min atten in Stop Band (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.iirHpfStopBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "60", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofBpfStopBandLabel1.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band-1", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofBpfStopBandEdit1.setText(QtGui.QApplication.translate("MainWindow", "0.2", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofBpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band ", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofBpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.3", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofBpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band ", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofBpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofBpfStopBandLabel2.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band-2", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofBpfStopBandEdit2.setText(QtGui.QApplication.translate("MainWindow", "0.6", None, QtGui.QApplication.UnicodeUTF8))
self.iirBpfPassBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Max loss in Pass Band (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.iirBpfPassBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8))
self.iirBpfStopBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Min atten in Stop Band (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.iirBpfStopBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "60", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofBsfPassBandLabel1.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band-1", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofBsfPassBandEdit1.setText(QtGui.QApplication.translate("MainWindow", "0.2", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofBsfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band ", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofBsfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.3", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofBsfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band ", None, QtGui.QApplication.UnicodeUTF8))
self.iirendofBsfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.6", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofBsfPassBandLabel2.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band-2", None, QtGui.QApplication.UnicodeUTF8))
self.iirstartofBsfPassBandEdit2.setText(QtGui.QApplication.translate("MainWindow", "0.7", None, QtGui.QApplication.UnicodeUTF8))
self.iirBsfPassBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Max loss in Pass Band (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.iirBsfPassBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8))
self.iirBsfStopBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Min atten in Stop Band (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.iirBsfStopBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "60", None, QtGui.QApplication.UnicodeUTF8))
self.besselordLabel.setText(QtGui.QApplication.translate("MainWindow", "Filter Order", None, QtGui.QApplication.UnicodeUTF8))
self.besselordEdit.setText(QtGui.QApplication.translate("MainWindow", "10", None, QtGui.QApplication.UnicodeUTF8))
self.iirbesselcritLabel1.setText(QtGui.QApplication.translate("MainWindow", "Critical point-1", None, QtGui.QApplication.UnicodeUTF8))
self.iirbesselcritEdit1.setText(QtGui.QApplication.translate("MainWindow", "0.2", None, QtGui.QApplication.UnicodeUTF8))
self.iirbesselcritEdit2.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8))
self.iirbesselcritLabel2.setText(QtGui.QApplication.translate("MainWindow", "Critical point-2", None, QtGui.QApplication.UnicodeUTF8))
self.firhbordLabel.setText(QtGui.QApplication.translate("MainWindow", "Filter Order", None, QtGui.QApplication.UnicodeUTF8))
self.firhbordEdit.setText(QtGui.QApplication.translate("MainWindow", "34", None, QtGui.QApplication.UnicodeUTF8))
self.firhbtrEditLabel2.setText(QtGui.QApplication.translate("MainWindow", "Transition width \n"
" (from fs/4)", None, QtGui.QApplication.UnicodeUTF8))
self.firhbtrEdit.setText(QtGui.QApplication.translate("MainWindow", "10000", None, QtGui.QApplication.UnicodeUTF8))
self.filterPropsBox.setTitle(QtGui.QApplication.translate("MainWindow", "Filter Properties", None, QtGui.QApplication.UnicodeUTF8))
self.nTapsLabel.setText(QtGui.QApplication.translate("MainWindow", "Number of Taps:", None, QtGui.QApplication.UnicodeUTF8))
self.designButton.setText(QtGui.QApplication.translate("MainWindow", "Design", None, QtGui.QApplication.UnicodeUTF8))
self.menu_File.setTitle(QtGui.QApplication.translate("MainWindow", "&File", None, QtGui.QApplication.UnicodeUTF8))
self.menu_Analysis.setTitle(QtGui.QApplication.translate("MainWindow", "Analysis", None, QtGui.QApplication.UnicodeUTF8))
self.menuWidgets.setTitle(QtGui.QApplication.translate("MainWindow", "View", None, QtGui.QApplication.UnicodeUTF8))
self.action_exit.setText(QtGui.QApplication.translate("MainWindow", "E&xit", None, QtGui.QApplication.UnicodeUTF8))
self.action_save.setText(QtGui.QApplication.translate("MainWindow", "&Save", None, QtGui.QApplication.UnicodeUTF8))
self.action_save.setShortcut(QtGui.QApplication.translate("MainWindow", "Ctrl+S", None, QtGui.QApplication.UnicodeUTF8))
self.action_open.setText(QtGui.QApplication.translate("MainWindow", "&Open", None, QtGui.QApplication.UnicodeUTF8))
self.action_open.setShortcut(QtGui.QApplication.translate("MainWindow", "Ctrl+O", None, QtGui.QApplication.UnicodeUTF8))
self.actionMagnitude_Response.setText(QtGui.QApplication.translate("MainWindow", "Magnitude Response", None, QtGui.QApplication.UnicodeUTF8))
        self.actionPhase_Respone.setText(QtGui.QApplication.translate("MainWindow", "Phase Response", None, QtGui.QApplication.UnicodeUTF8))
self.actionGroup_Delay.setText(QtGui.QApplication.translate("MainWindow", "Group Delay", None, QtGui.QApplication.UnicodeUTF8))
self.actionPhase_Delay.setText(QtGui.QApplication.translate("MainWindow", "Phase Delay", None, QtGui.QApplication.UnicodeUTF8))
self.actionImpulse_Response.setText(QtGui.QApplication.translate("MainWindow", "Impulse Response", None, QtGui.QApplication.UnicodeUTF8))
self.actionStep_Response.setText(QtGui.QApplication.translate("MainWindow", "Step Response", None, QtGui.QApplication.UnicodeUTF8))
self.actionPole_Zero_Plot.setText(QtGui.QApplication.translate("MainWindow", "Pole-Zero Plot", None, QtGui.QApplication.UnicodeUTF8))
self.actionGrid.setText(QtGui.QApplication.translate("MainWindow", "Grid", None, QtGui.QApplication.UnicodeUTF8))
self.actionPole_Zero_Plot_2.setText(QtGui.QApplication.translate("MainWindow", "Pole Zero Plot", None, QtGui.QApplication.UnicodeUTF8))
self.actionIdeal_Band.setText(QtGui.QApplication.translate("MainWindow", "Ideal Band", None, QtGui.QApplication.UnicodeUTF8))
self.actionGrid_2.setText(QtGui.QApplication.translate("MainWindow", "Grid", None, QtGui.QApplication.UnicodeUTF8))
self.actionGrid_3.setText(QtGui.QApplication.translate("MainWindow", "Grid", None, QtGui.QApplication.UnicodeUTF8))
self.actionTabbed.setText(QtGui.QApplication.translate("MainWindow", "Tabbed", None, QtGui.QApplication.UnicodeUTF8))
self.actionOverlay.setText(QtGui.QApplication.translate("MainWindow", "Overlay", None, QtGui.QApplication.UnicodeUTF8))
self.actionResponse_widget.setText(QtGui.QApplication.translate("MainWindow", "Response widget", None, QtGui.QApplication.UnicodeUTF8))
self.actionSpec_widget.setText(QtGui.QApplication.translate("MainWindow", "Spec widget", None, QtGui.QApplication.UnicodeUTF8))
self.actionQuick_access.setText(QtGui.QApplication.translate("MainWindow", "Quick access", None, QtGui.QApplication.UnicodeUTF8))
self.actionFilter_Coefficients.setText(QtGui.QApplication.translate("MainWindow", "Filter Coefficients", None, QtGui.QApplication.UnicodeUTF8))
self.actionDesign_widget.setText(QtGui.QApplication.translate("MainWindow", "Design widget", None, QtGui.QApplication.UnicodeUTF8))
self.actionOverlay_2.setText(QtGui.QApplication.translate("MainWindow", "Overlay", None, QtGui.QApplication.UnicodeUTF8))
self.actionGridview.setText(QtGui.QApplication.translate("MainWindow", "Gridview", None, QtGui.QApplication.UnicodeUTF8))
self.actionDesign_widget_2.setText(QtGui.QApplication.translate("MainWindow", "Design widget", None, QtGui.QApplication.UnicodeUTF8))
self.actionQuick_access_2.setText(QtGui.QApplication.translate("MainWindow", "Quick access", None, QtGui.QApplication.UnicodeUTF8))
self.actionSpec_widget_2.setText(QtGui.QApplication.translate("MainWindow", "Spec widget", None, QtGui.QApplication.UnicodeUTF8))
self.actionResponse_widget_2.setText(QtGui.QApplication.translate("MainWindow", "Response widget", None, QtGui.QApplication.UnicodeUTF8))
self.actionDesign_Widget.setText(QtGui.QApplication.translate("MainWindow", "Design Widget", None, QtGui.QApplication.UnicodeUTF8))
self.actionQuick_Access.setText(QtGui.QApplication.translate("MainWindow", "Quick Access", None, QtGui.QApplication.UnicodeUTF8))
self.actionSpec_Widget.setText(QtGui.QApplication.translate("MainWindow", "Spec Widget", None, QtGui.QApplication.UnicodeUTF8))
self.actionResponse_Widget.setText(QtGui.QApplication.translate("MainWindow", "Response Widget", None, QtGui.QApplication.UnicodeUTF8))
self.actionTabview_2.setText(QtGui.QApplication.translate("MainWindow", "Tabview", None, QtGui.QApplication.UnicodeUTF8))
self.actionPlot_select.setText(QtGui.QApplication.translate("MainWindow", "Plot select", None, QtGui.QApplication.UnicodeUTF8))
self.actionBand_Diagram.setText(QtGui.QApplication.translate("MainWindow", "Band Diagram", None, QtGui.QApplication.UnicodeUTF8))
self.actionCheck.setText(QtGui.QApplication.translate("MainWindow", "check", None, QtGui.QApplication.UnicodeUTF8))
self.actionPlot_FFT_points.setText(QtGui.QApplication.translate("MainWindow", "Plot FFT points", None, QtGui.QApplication.UnicodeUTF8))
from PyQt4 import Qwt5
from bandgraphicsview import BandGraphicsView
from polezero_plot import PzPlot
import icons_rc
|
NicovincX2/Python-3.5 | refs/heads/master | Problèmes divers/Find the missing permutation/find_comparison.py | 1 | # -*- coding: utf-8 -*-
import os
from itertools import permutations
given = '''ABCD CABD ACDB DACB BCDA ACBD ADCB CDAB DABC BCAD CADB CDBA
CBAD ABDC ADBC BDCA DCBA BACD BADC BDAC CBDA DBCA DCAB'''.split()
allPerms = [''.join(x) for x in permutations(given[0])]
missing = list(set(allPerms) - set(given)) # ['DBAC']
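# A hedged alternative sketch: across all 24 permutations each letter appears
# an even number of times in every column, so XOR-ing the character codes of
# the 23 given strings column-wise leaves exactly the missing permutation.
from functools import reduce
missing_xor = ''.join(chr(reduce(lambda a, b: a ^ b,
                                 (ord(s[i]) for s in given)))
                      for i in range(len(given[0])))  # 'DBAC'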
os.system("pause")
|
HaveF/idapython | refs/heads/master | pywraps/py_plgform.py | 16 | import sys
import _idaapi
#<pycode(py_plgform)>
class PluginForm(object):
"""
PluginForm class.
This form can be used to host additional controls. Please check the PyQt example.
"""
FORM_MDI = 0x01
"""start by default as MDI (obsolete)"""
FORM_TAB = 0x02
"""attached by default to a tab"""
FORM_RESTORE = 0x04
"""restore state from desktop config"""
FORM_ONTOP = 0x08
"""form should be "ontop"""
FORM_MENU = 0x10
"""form must be listed in the windows menu (automatically set for all plugins)"""
FORM_CENTERED = 0x20
"""form will be centered on the screen"""
FORM_PERSIST = 0x40
"""form will persist until explicitly closed with Close()"""
def __init__(self):
"""
"""
self.__clink__ = _idaapi.plgform_new()
def Show(self, caption, options = 0):
"""
        Creates the form if it was not created yet, or brings it to the front if it already exists
@param caption: The form caption
@param options: One of PluginForm.FORM_ constants
"""
options |= PluginForm.FORM_TAB|PluginForm.FORM_MENU|PluginForm.FORM_RESTORE
return _idaapi.plgform_show(self.__clink__, self, caption, options)
@staticmethod
def FormToPyQtWidget(form, ctx = sys.modules['__main__']):
"""
Use this method to convert a TForm* to a QWidget to be used by PyQt
@param ctx: Context. Reference to a module that already imported SIP and QtGui modules
"""
return ctx.sip.wrapinstance(ctx.sip.voidptr(form).__int__(), ctx.QtGui.QWidget)
@staticmethod
def FormToPySideWidget(form, ctx = sys.modules['__main__']):
"""
Use this method to convert a TForm* to a QWidget to be used by PySide
@param ctx: Context. Reference to a module that already imported QtGui module
"""
if form is None:
return None
if type(form).__name__ == "SwigPyObject":
# Since 'form' is a SwigPyObject, we first need to convert it to a PyCObject.
# However, there's no easy way of doing it, so we'll use a rather brutal approach:
# converting the SwigPyObject to a 'long' (will go through 'SwigPyObject_long',
# that will return the pointer's value as a long), and then convert that value
# back to a pointer into a PyCObject.
ptr_l = long(form)
from ctypes import pythonapi, c_void_p, py_object
pythonapi.PyCObject_FromVoidPtr.restype = py_object
pythonapi.PyCObject_AsVoidPtr.argtypes = [c_void_p, c_void_p]
form = pythonapi.PyCObject_FromVoidPtr(ptr_l, 0)
return ctx.QtGui.QWidget.FromCObject(form)
def OnCreate(self, form):
"""
This event is called when the plugin form is created.
The programmer should populate the form when this event is triggered.
@return: None
"""
pass
def OnClose(self, form):
"""
Called when the plugin form is closed
@return: None
"""
pass
def Close(self, options):
"""
Closes the form.
@param options: Close options (FORM_SAVE, FORM_NO_CONTEXT, ...)
@return: None
"""
return _idaapi.plgform_close(self.__clink__, options)
FORM_SAVE = 0x1
"""Save state in desktop config"""
FORM_NO_CONTEXT = 0x2
"""Don't change the current context (useful for toolbars)"""
FORM_DONT_SAVE_SIZE = 0x4
"""Don't save size of the window"""
FORM_CLOSE_LATER = 0x8
"""This flag should be used when Close() is called from an event handler"""
#</pycode(py_plgform)>
plg = PluginForm()
plg.Show("This is it")
|
abdullah2891/remo | refs/heads/master | vendor-local/lib/python/tablib/packages/yaml/serializer.py | 561 |
__all__ = ['Serializer', 'SerializerError']
from error import YAMLError
from events import *
from nodes import *
class SerializerError(YAMLError):
pass
class Serializer(object):
ANCHOR_TEMPLATE = u'id%03d'
def __init__(self, encoding=None,
explicit_start=None, explicit_end=None, version=None, tags=None):
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
self.use_version = version
self.use_tags = tags
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
self.closed = None
def open(self):
if self.closed is None:
self.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
elif self.closed:
raise SerializerError("serializer is closed")
else:
raise SerializerError("serializer is already opened")
def close(self):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif not self.closed:
self.emit(StreamEndEvent())
self.closed = True
#def __del__(self):
# self.close()
def serialize(self, node):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif self.closed:
raise SerializerError("serializer is closed")
self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
version=self.use_version, tags=self.use_tags))
self.anchor_node(node)
self.serialize_node(node, None, None)
self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
def anchor_node(self, node):
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
else:
self.anchors[node] = None
if isinstance(node, SequenceNode):
for item in node.value:
self.anchor_node(item)
elif isinstance(node, MappingNode):
for key, value in node.value:
self.anchor_node(key)
self.anchor_node(value)
def generate_anchor(self, node):
self.last_anchor_id += 1
return self.ANCHOR_TEMPLATE % self.last_anchor_id
def serialize_node(self, node, parent, index):
alias = self.anchors[node]
if node in self.serialized_nodes:
self.emit(AliasEvent(alias))
else:
self.serialized_nodes[node] = True
self.descend_resolver(parent, index)
if isinstance(node, ScalarNode):
detected_tag = self.resolve(ScalarNode, node.value, (True, False))
default_tag = self.resolve(ScalarNode, node.value, (False, True))
implicit = (node.tag == detected_tag), (node.tag == default_tag)
self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
style=node.style))
elif isinstance(node, SequenceNode):
implicit = (node.tag
== self.resolve(SequenceNode, node.value, True))
self.emit(SequenceStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
index = 0
for item in node.value:
self.serialize_node(item, node, index)
index += 1
self.emit(SequenceEndEvent())
elif isinstance(node, MappingNode):
implicit = (node.tag
== self.resolve(MappingNode, node.value, True))
self.emit(MappingStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
for key, value in node.value:
self.serialize_node(key, node, None)
self.serialize_node(value, node, key)
self.emit(MappingEndEvent())
self.ascend_resolver()
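# A hedged sketch of how this mixin is normally assembled (mirrors the
# layering used by PyYAML's dumper; the sibling modules are assumed to be
# importable from this package):
#
# from emitter import Emitter
# from representer import Representer
# from resolver import Resolver
#
# class MiniDumper(Emitter, Serializer, Representer, Resolver):
#     def __init__(self, stream, encoding=None):
#         Emitter.__init__(self, stream)
#         Serializer.__init__(self, encoding=encoding)
#         Representer.__init__(self)
#         Resolver.__init__(self)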
|
HopeFOAM/HopeFOAM | refs/heads/master | ThirdParty-0.1/ParaView-5.0.1/Applications/ParaView/Testing/Python/SaveAnimationMultiView.py | 1 | #!/usr/bin/env python
import QtTesting
import QtTestingImage
import time
object1 = 'pqClientMainWindow/menubar/menuSources'
QtTesting.playCommand(object1, 'activate', 'Wavelet')
object2 = 'pqClientMainWindow/objectInspectorDock/objectInspector/Accept'
QtTesting.playCommand(object2, 'activate', '')
object6 = 'pqClientMainWindow/1QTabBar1'
QtTesting.playCommand(object6, 'set_tab_with_text', 'Display')
object24 = 'pqClientMainWindow/displayDock/displayWidgetFrame/displayScrollArea/qt_scrollarea_viewport/displayWidget/pqDisplayProxyEditor/StyleGroup/StyleRepresentation/comboBox'
QtTesting.playCommand(object24, 'set_string', 'Surface')
object25 = 'pqClientMainWindow/displayDock/displayWidgetFrame/displayScrollArea/qt_scrollarea_viewport/displayWidget/pqDisplayProxyEditor/ColorGroup/ColorBy/Variables'
QtTesting.playCommand(object25, 'set_string', 'RTData')
QtTesting.playCommand(object6, 'set_tab_with_text', 'Properties')
object3 = 'pqClientMainWindow/centralwidget/MultiViewWidget/CoreWidget/qt_tabwidget_stackedwidget/MultiViewWidget1/Frame.0/SplitHorizontal'
QtTesting.playCommand(object3, 'activate', '')
QtTesting.playCommand(object1, 'activate', 'Arrow')
QtTesting.playCommand(object2, 'activate', '')
object4 = 'pqClientMainWindow/objectInspectorDock/objectInspector/Delete'
QtTesting.playCommand(object4, 'activate', '')
object4 = 'pqClientMainWindow/centralwidget/MultiViewWidget/CoreWidget/qt_tabwidget_stackedwidget/MultiViewWidget1/Splitter.0/Frame.2/SplitVertical'
QtTesting.playCommand(object4, 'activate', '')
QtTesting.playCommand(object1, 'activate', 'Sphere')
QtTesting.playCommand(object2, 'activate', '')
QtTesting.playCommand(object6, 'set_tab_with_text', 'Display')
object8 = 'pqClientMainWindow/displayDock/displayWidgetFrame/displayScrollArea/qt_scrollarea_viewport/displayWidget/Form/ViewGroup/ViewData'
QtTesting.playCommand(object8, 'set_boolean', 'false')
QtTesting.playCommand(object8, 'set_boolean', 'false')
QtTesting.playCommand(object8, 'set_boolean', 'false')
QtTesting.playCommand(object8, 'set_boolean', 'false')
object9 = 'pqClientMainWindow/menubar/menu_File'
QtTesting.playCommand(object9, 'activate', 'actionFileOpen')
QtTesting.playCommand(object6, 'set_tab_with_text', 'Properties')
object10 = 'pqClientMainWindow/FileOpenDialog'
QtTesting.playCommand(object10, 'filesSelected', '$PARAVIEW_DATA_ROOT/dualSphereAnimation.pvd')
QtTesting.playCommand(object2, 'activate', '')
object11 = 'pqClientMainWindow/menubar/menuView'
QtTesting.playCommand(object11, 'activate', 'Animation View')
object15 = 'pqClientMainWindow/pipelineBrowserDock/pipelineBrowser'
QtTesting.playCommand(object15, 'currentChanged', '/0/1|0')
#object12 = 'pqClientMainWindow/animationPanelDock/1pqAnimationPanel0/scrollArea/qt_scrollarea_viewport/AnimationPanel/tracksGroup/propertyName'
#object14 = 'pqClientMainWindow/animationPanelDock/1pqAnimationPanel0/scrollArea/qt_scrollarea_viewport/AnimationPanel/keyFramePropertiesGroup/addKeyFrame'
#QtTesting.playCommand(object12, 'set_string', 'End Theta')
#QtTesting.playCommand(object14, 'activate', '')
object12 = 'pqClientMainWindow/animationViewDock/animationView/pqAnimationWidget/CreateDeleteWidget/PropertyCombo'
QtTesting.playCommand(object12, 'set_string', 'End Theta')
object10 = "pqClientMainWindow/animationViewDock/animationView/1pqAnimationWidget0/1QHeaderView0"
QtTesting.playCommand(object10, "mousePress", "1,1,0,0,0,2")
QtTesting.playCommand(object10, "mouseRelease", "1,1,0,0,0,2")
QtTesting.playCommand(object15, 'currentChanged', '/0/0|0')
QtTesting.playCommand(object10, "mousePress", "1,1,0,0,0,3")
QtTesting.playCommand(object10, "mouseRelease", "1,1,0,0,0,3")
object17 = 'pqClientMainWindow/VCRToolbar/1QToolButton0'
QtTesting.playCommand(object17, 'activate', '')
object18 = 'pqClientMainWindow/VCRToolbar/1QToolButton3'
#object19 = 'pqClientMainWindow/animationPanelDock/1pqAnimationPanel0/scrollArea/qt_scrollarea_viewport/AnimationPanel/keyFramePropertiesGroup/editorFrame/SignalAdaptorKeyFrameValue/lineEdit'
#QtTesting.playCommand(object19, 'set_string', '10')
#QtTesting.playCommand(object19, 'set_string', '10')
object20 = 'pqClientMainWindow/VCRToolbar/1QToolButton1'
QtTesting.playCommand(object11, 'activate', 'Animation View')
QtTesting.playCommand(object11, 'activate', 'Object Inspector')
QtTesting.playCommand(object11, 'activate', 'Pipeline Browser')
QtTesting.playCommand(object9, 'activate', 'actionFileSaveAnimation')
object21 = 'Dialog/spinBoxWidth'
QtTesting.playCommand(object21, 'set_int', '800')
object22 = 'Dialog/spinBoxHeight'
QtTesting.playCommand(object22, 'set_int', '800')
object22 = 'Dialog/okButton'
QtTesting.playCommand(object22, 'activate', '')
objectSaveAnimationDialog = 'FileSaveAnimationDialog'
QtTesting.playCommand(objectSaveAnimationDialog, 'filesSelected', '$PARAVIEW_TEST_ROOT/movie_test.png')
time.sleep(3)
objectPlayButton = 'pqClientMainWindow/VCRToolbar/1QToolButton2'
while QtTesting.getProperty(objectPlayButton, "text") != 'Play':
    time.sleep(1)
QtTestingImage.compareImage('$PARAVIEW_TEST_ROOT/movie_test.0005.png', 'SaveAnimationMultiView.png')
|
abircse06/youtube-dl | refs/heads/master | youtube_dl/extractor/mixcloud.py | 11 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
)
from ..utils import (
ExtractorError,
HEADRequest,
str_to_int,
)
class MixcloudIE(InfoExtractor):
_VALID_URL = r'^(?:https?://)?(?:www\.)?mixcloud\.com/([^/]+)/([^/]+)'
IE_NAME = 'mixcloud'
_TESTS = [{
'url': 'http://www.mixcloud.com/dholbach/cryptkeeper/',
'info_dict': {
'id': 'dholbach-cryptkeeper',
'ext': 'mp3',
'title': 'Cryptkeeper',
'description': 'After quite a long silence from myself, finally another Drum\'n\'Bass mix with my favourite current dance floor bangers.',
'uploader': 'Daniel Holbach',
'uploader_id': 'dholbach',
'thumbnail': 're:https?://.*\.jpg',
'view_count': int,
'like_count': int,
},
}, {
'url': 'http://www.mixcloud.com/gillespeterson/caribou-7-inch-vinyl-mix-chat/',
'info_dict': {
'id': 'gillespeterson-caribou-7-inch-vinyl-mix-chat',
'ext': 'mp3',
'title': 'Caribou 7 inch Vinyl Mix & Chat',
'description': 'md5:2b8aec6adce69f9d41724647c65875e8',
'uploader': 'Gilles Peterson Worldwide',
'uploader_id': 'gillespeterson',
'thumbnail': 're:https?://.*/images/',
'view_count': int,
'like_count': int,
},
}]
def _check_url(self, url, track_id, ext):
try:
            # We only want to know whether the request succeeds;
            # don't download the whole file
self._request_webpage(
HEADRequest(url), track_id,
'Trying %s URL' % ext)
return True
except ExtractorError:
return False
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
uploader = mobj.group(1)
cloudcast_name = mobj.group(2)
track_id = compat_urllib_parse.unquote('-'.join((uploader, cloudcast_name)))
webpage = self._download_webpage(url, track_id)
preview_url = self._search_regex(
r'\s(?:data-preview-url|m-preview)="([^"]+)"', webpage, 'preview url')
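        # The page only advertises a preview stream; the full track is served
        # from /c/originals/ as mp3 or, failing that, from m4a/64/ as m4a.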
song_url = preview_url.replace('/previews/', '/c/originals/')
if not self._check_url(song_url, track_id, 'mp3'):
song_url = song_url.replace('.mp3', '.m4a').replace('originals/', 'm4a/64/')
if not self._check_url(song_url, track_id, 'm4a'):
raise ExtractorError('Unable to extract track url')
PREFIX = (
r'm-play-on-spacebar[^>]+'
r'(?:\s+[a-zA-Z0-9-]+(?:="[^"]+")?)*?\s+')
title = self._html_search_regex(
PREFIX + r'm-title="([^"]+)"', webpage, 'title')
thumbnail = self._proto_relative_url(self._html_search_regex(
PREFIX + r'm-thumbnail-url="([^"]+)"', webpage, 'thumbnail',
fatal=False))
uploader = self._html_search_regex(
PREFIX + r'm-owner-name="([^"]+)"',
webpage, 'uploader', fatal=False)
uploader_id = self._search_regex(
r'\s+"profile": "([^"]+)",', webpage, 'uploader id', fatal=False)
description = self._og_search_description(webpage)
like_count = str_to_int(self._search_regex(
r'\bbutton-favorite\b[^>]+m-ajax-toggle-count="([^"]+)"',
webpage, 'like count', fatal=False))
view_count = str_to_int(self._search_regex(
[r'<meta itemprop="interactionCount" content="UserPlays:([0-9]+)"',
r'/listeners/?">([0-9,.]+)</a>'],
webpage, 'play count', fatal=False))
return {
'id': track_id,
'title': title,
'url': song_url,
'description': description,
'thumbnail': thumbnail,
'uploader': uploader,
'uploader_id': uploader_id,
'view_count': view_count,
'like_count': like_count,
}
|
elkingtonmcb/pattern | refs/heads/master | pattern/server/cherrypy/cherrypy/__init__.py | 37 | """CherryPy is a pythonic, object-oriented HTTP framework.
CherryPy consists of not one, but four separate API layers.
The APPLICATION LAYER is the simplest. CherryPy applications are written as
a tree of classes and methods, where each branch in the tree corresponds to
a branch in the URL path. Each method is a 'page handler', which receives
GET and POST params as keyword arguments, and returns or yields the (HTML)
body of the response. The special method name 'index' is used for paths
that end in a slash, and the special method name 'default' is used to
handle multiple paths via a single handler. This layer also includes:
* the 'exposed' attribute (and cherrypy.expose)
* cherrypy.quickstart()
* _cp_config attributes
* cherrypy.tools (including cherrypy.session)
* cherrypy.url()
The ENVIRONMENT LAYER is used by developers at all levels. It provides
information about the current request and response, plus the application
and server environment, via a (default) set of top-level objects:
* cherrypy.request
* cherrypy.response
* cherrypy.engine
* cherrypy.server
* cherrypy.tree
* cherrypy.config
* cherrypy.thread_data
* cherrypy.log
* cherrypy.HTTPError, NotFound, and HTTPRedirect
* cherrypy.lib
The EXTENSION LAYER allows advanced users to construct and share their own
plugins. It consists of:
* Hook API
* Tool API
* Toolbox API
* Dispatch API
* Config Namespace API
Finally, there is the CORE LAYER, which uses the core APIs to construct
the default components which are available at higher layers. You can think
of the default components as the 'reference implementation' for CherryPy.
Megaframeworks (and advanced users) may replace the default components
with customized or extended components. The core API's are:
* Application API
* Engine API
* Request API
* Server API
* WSGI API
These API's are described in the CherryPy specification:
http://www.cherrypy.org/wiki/CherryPySpec
"""
__version__ = "3.2.4"
from cherrypy._cpcompat import urljoin as _urljoin, urlencode as _urlencode
from cherrypy._cpcompat import basestring, unicodestr, set
from cherrypy._cperror import HTTPError, HTTPRedirect, InternalRedirect
from cherrypy._cperror import NotFound, CherryPyException, TimeoutError
from cherrypy import _cpdispatch as dispatch
from cherrypy import _cptools
tools = _cptools.default_toolbox
Tool = _cptools.Tool
from cherrypy import _cprequest
from cherrypy.lib import httputil as _httputil
from cherrypy import _cptree
tree = _cptree.Tree()
from cherrypy._cptree import Application
from cherrypy import _cpwsgi as wsgi
from cherrypy import process
try:
from cherrypy.process import win32
engine = win32.Win32Bus()
engine.console_control_handler = win32.ConsoleCtrlHandler(engine)
del win32
except ImportError:
engine = process.bus
# Timeout monitor. We add two channels to the engine
# to which cherrypy.Application will publish.
engine.listeners['before_request'] = set()
engine.listeners['after_request'] = set()
class _TimeoutMonitor(process.plugins.Monitor):
def __init__(self, bus):
self.servings = []
process.plugins.Monitor.__init__(self, bus, self.run)
def before_request(self):
self.servings.append((serving.request, serving.response))
def after_request(self):
try:
self.servings.remove((serving.request, serving.response))
except ValueError:
pass
def run(self):
"""Check timeout on all responses. (Internal)"""
for req, resp in self.servings:
resp.check_timeout()
engine.timeout_monitor = _TimeoutMonitor(engine)
engine.timeout_monitor.subscribe()
engine.autoreload = process.plugins.Autoreloader(engine)
engine.autoreload.subscribe()
engine.thread_manager = process.plugins.ThreadManager(engine)
engine.thread_manager.subscribe()
engine.signal_handler = process.plugins.SignalHandler(engine)
class _HandleSignalsPlugin(object):
"""Handle signals from other processes based on the configured
platform handlers above."""
def __init__(self, bus):
self.bus = bus
def subscribe(self):
"""Add the handlers based on the platform"""
if hasattr(self.bus, "signal_handler"):
self.bus.signal_handler.subscribe()
if hasattr(self.bus, "console_control_handler"):
self.bus.console_control_handler.subscribe()
engine.signals = _HandleSignalsPlugin(engine)
from cherrypy import _cpserver
server = _cpserver.Server()
server.subscribe()
def quickstart(root=None, script_name="", config=None):
"""Mount the given root, start the builtin server (and engine), then block.
root: an instance of a "controller class" (a collection of page handler
methods) which represents the root of the application.
script_name: a string containing the "mount point" of the application.
This should start with a slash, and be the path portion of the URL
at which to mount the given root. For example, if root.index() will
handle requests to "http://www.example.com:8080/dept/app1/", then
the script_name argument would be "/dept/app1".
It MUST NOT end in a slash. If the script_name refers to the root
of the URI, it MUST be an empty string (not "/").
config: a file or dict containing application config. If this contains
a [global] section, those entries will be used in the global
(site-wide) config.
"""
if config:
_global_conf_alias.update(config)
tree.mount(root, script_name, config)
engine.signals.subscribe()
engine.start()
engine.block()
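# A hedged usage sketch for quickstart (comments only; the class name and
# port below are illustrative, not part of this module):
#
# import cherrypy
#
# class Root(object):
#     @cherrypy.expose
#     def index(self):
#         return "Hello world!"
#
# cherrypy.quickstart(Root(), "", {"global": {"server.socket_port": 8080}})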
from cherrypy._cpcompat import threadlocal as _local
class _Serving(_local):
"""An interface for registering request and response objects.
Rather than have a separate "thread local" object for the request and
the response, this class works as a single threadlocal container for
both objects (and any others which developers wish to define). In this
way, we can easily dump those objects when we stop/start a new HTTP
conversation, yet still refer to them as module-level globals in a
thread-safe way.
"""
request = _cprequest.Request(_httputil.Host("127.0.0.1", 80),
_httputil.Host("127.0.0.1", 1111))
"""
The request object for the current thread. In the main thread,
and any threads which are not receiving HTTP requests, this is None."""
response = _cprequest.Response()
"""
The response object for the current thread. In the main thread,
and any threads which are not receiving HTTP requests, this is None."""
def load(self, request, response):
self.request = request
self.response = response
def clear(self):
"""Remove all attributes of self."""
self.__dict__.clear()
serving = _Serving()
class _ThreadLocalProxy(object):
__slots__ = ['__attrname__', '__dict__']
def __init__(self, attrname):
self.__attrname__ = attrname
def __getattr__(self, name):
child = getattr(serving, self.__attrname__)
return getattr(child, name)
def __setattr__(self, name, value):
if name in ("__attrname__", ):
object.__setattr__(self, name, value)
else:
child = getattr(serving, self.__attrname__)
setattr(child, name, value)
def __delattr__(self, name):
child = getattr(serving, self.__attrname__)
delattr(child, name)
def _get_dict(self):
child = getattr(serving, self.__attrname__)
d = child.__class__.__dict__.copy()
d.update(child.__dict__)
return d
__dict__ = property(_get_dict)
def __getitem__(self, key):
child = getattr(serving, self.__attrname__)
return child[key]
def __setitem__(self, key, value):
child = getattr(serving, self.__attrname__)
child[key] = value
def __delitem__(self, key):
child = getattr(serving, self.__attrname__)
del child[key]
def __contains__(self, key):
child = getattr(serving, self.__attrname__)
return key in child
def __len__(self):
child = getattr(serving, self.__attrname__)
return len(child)
def __nonzero__(self):
child = getattr(serving, self.__attrname__)
return bool(child)
# Python 3
__bool__ = __nonzero__
# Create request and response object (the same objects will be used
# throughout the entire life of the webserver, but will redirect
# to the "serving" object)
request = _ThreadLocalProxy('request')
response = _ThreadLocalProxy('response')
# Create thread_data object as a thread-specific all-purpose storage
class _ThreadData(_local):
"""A container for thread-specific data."""
thread_data = _ThreadData()
# Monkeypatch pydoc to allow help() to go through the threadlocal proxy.
# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve.
# The only other way would be to change what is returned from type(request)
# and that's not possible in pure Python (you'd have to fake ob_type).
def _cherrypy_pydoc_resolve(thing, forceload=0):
"""Given an object or a path to an object, get the object and its name."""
if isinstance(thing, _ThreadLocalProxy):
thing = getattr(serving, thing.__attrname__)
return _pydoc._builtin_resolve(thing, forceload)
try:
import pydoc as _pydoc
_pydoc._builtin_resolve = _pydoc.resolve
_pydoc.resolve = _cherrypy_pydoc_resolve
except ImportError:
pass
from cherrypy import _cplogging
class _GlobalLogManager(_cplogging.LogManager):
"""A site-wide LogManager; routes to app.log or global log as appropriate.
This :class:`LogManager<cherrypy._cplogging.LogManager>` implements
cherrypy.log() and cherrypy.log.access(). If either
function is called during a request, the message will be sent to the
logger for the current Application. If they are called outside of a
request, the message will be sent to the site-wide logger.
"""
def __call__(self, *args, **kwargs):
"""Log the given message to the app.log or global log as appropriate."""
# Do NOT use try/except here. See http://www.cherrypy.org/ticket/945
if hasattr(request, 'app') and hasattr(request.app, 'log'):
log = request.app.log
else:
log = self
return log.error(*args, **kwargs)
def access(self):
"""Log an access message to the app.log or global log as appropriate."""
try:
return request.app.log.access()
except AttributeError:
return _cplogging.LogManager.access(self)
log = _GlobalLogManager()
# Set a default screen handler on the global log.
log.screen = True
log.error_file = ''
# Using an access file makes CP about 10% slower. Leave off by default.
log.access_file = ''
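# Hedged usage sketch: cherrypy.log(...) goes to the current application's
# log during a request and to this site-wide log otherwise, e.g.
#
# cherrypy.log("Shutting down", "ENGINE")    # message with a context tag
# cherrypy.log("Oops", "APP", severity=40)   # logging.ERROR-level message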
def _buslog(msg, level):
log.error(msg, 'ENGINE', severity=level)
engine.subscribe('log', _buslog)
# Helper functions for CP apps #
def expose(func=None, alias=None):
"""Expose the function, optionally providing an alias or set of aliases."""
def expose_(func):
func.exposed = True
if alias is not None:
if isinstance(alias, basestring):
parents[alias.replace(".", "_")] = func
else:
for a in alias:
parents[a.replace(".", "_")] = func
return func
import sys, types
if isinstance(func, (types.FunctionType, types.MethodType)):
if alias is None:
# @expose
func.exposed = True
return func
else:
# func = expose(func, alias)
parents = sys._getframe(1).f_locals
return expose_(func)
elif func is None:
if alias is None:
# @expose()
parents = sys._getframe(1).f_locals
return expose_
else:
# @expose(alias="alias") or
# @expose(alias=["alias1", "alias2"])
parents = sys._getframe(1).f_locals
return expose_
else:
# @expose("alias") or
# @expose(["alias1", "alias2"])
parents = sys._getframe(1).f_locals
alias = func
return expose_
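# Hedged illustrations of the call forms expose() supports (comments only):
#
# @expose                       # bare decorator
# def index(self): ...
#
# @expose(alias="homepage")     # keyword alias
# def index(self): ...
#
# @expose(["home", "start"])    # positional alias list
# def index(self): ...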
def popargs(*args, **kwargs):
"""A decorator for _cp_dispatch
(cherrypy.dispatch.Dispatcher.dispatch_method_name).
Optional keyword argument: handler=(Object or Function)
Provides a _cp_dispatch function that pops off path segments into
cherrypy.request.params under the names specified. The dispatch
is then forwarded on to the next vpath element.
Note that any existing (and exposed) member function of the class that
popargs is applied to will override that value of the argument. For
instance, if you have a method named "list" on the class decorated with
popargs, then accessing "/list" will call that function instead of popping
it off as the requested parameter. This restriction applies to all
_cp_dispatch functions. The only way around this restriction is to create
a "blank class" whose only function is to provide _cp_dispatch.
If there are path elements after the arguments, or more arguments
are requested than are available in the vpath, then the 'handler'
keyword argument specifies the next object to handle the parameterized
request. If handler is not specified or is None, then self is used.
If handler is a function rather than an instance, then that function
will be called with the args specified and the return value from that
function used as the next object INSTEAD of adding the parameters to
cherrypy.request.args.
This decorator may be used in one of two ways:
As a class decorator:
@cherrypy.popargs('year', 'month', 'day')
class Blog:
def index(self, year=None, month=None, day=None):
#Process the parameters here; any url like
#/, /2009, /2009/12, or /2009/12/31
#will fill in the appropriate parameters.
def create(self):
#This link will still be available at /create. Defined functions
#take precedence over arguments.
Or as a member of a class:
class Blog:
_cp_dispatch = cherrypy.popargs('year', 'month', 'day')
#...
The handler argument may be used to mix arguments with built in functions.
For instance, the following setup allows different activities at the
day, month, and year level:
class DayHandler:
def index(self, year, month, day):
#Do something with this day; probably list entries
def delete(self, year, month, day):
#Delete all entries for this day
@cherrypy.popargs('day', handler=DayHandler())
class MonthHandler:
def index(self, year, month):
#Do something with this month; probably list entries
def delete(self, year, month):
#Delete all entries for this month
@cherrypy.popargs('month', handler=MonthHandler())
class YearHandler:
def index(self, year):
#Do something with this year
#...
@cherrypy.popargs('year', handler=YearHandler())
class Root:
def index(self):
#...
"""
    #Since a keyword-only argument can't be declared after *args before
    #Python 3, we have to process 'handler' ourselves.
handler = None
handler_call = False
for k,v in kwargs.items():
if k == 'handler':
handler = v
else:
raise TypeError(
"cherrypy.popargs() got an unexpected keyword argument '{0}'" \
.format(k)
)
import inspect
if handler is not None \
and (hasattr(handler, '__call__') or inspect.isclass(handler)):
handler_call = True
def decorated(cls_or_self=None, vpath=None):
if inspect.isclass(cls_or_self):
#cherrypy.popargs is a class decorator
cls = cls_or_self
setattr(cls, dispatch.Dispatcher.dispatch_method_name, decorated)
return cls
#We're in the actual function
self = cls_or_self
parms = {}
for arg in args:
if not vpath:
break
parms[arg] = vpath.pop(0)
if handler is not None:
if handler_call:
return handler(**parms)
else:
request.params.update(parms)
return handler
request.params.update(parms)
#If we are the ultimate handler, then to prevent our _cp_dispatch
#from being called again, we will resolve remaining elements through
#getattr() directly.
if vpath:
return getattr(self, vpath.pop(0), None)
else:
return self
return decorated
def url(path="", qs="", script_name=None, base=None, relative=None):
"""Create an absolute URL for the given path.
If 'path' starts with a slash ('/'), this will return
(base + script_name + path + qs).
If it does not start with a slash, this returns
(base + script_name [+ request.path_info] + path + qs).
If script_name is None, cherrypy.request will be used
to find a script_name, if available.
If base is None, cherrypy.request.base will be used (if available).
Note that you can use cherrypy.tools.proxy to change this.
Finally, note that this function can be used to obtain an absolute URL
for the current request path (minus the querystring) by passing no args.
If you call url(qs=cherrypy.request.query_string), you should get the
original browser URL (assuming no internal redirections).
If relative is None or not provided, request.app.relative_urls will
be used (if available, else False). If False, the output will be an
absolute URL (including the scheme, host, vhost, and script_name).
If True, the output will instead be a URL that is relative to the
current request path, perhaps including '..' atoms. If relative is
the string 'server', the output will instead be a URL that is
relative to the server root; i.e., it will start with a slash.
"""
if isinstance(qs, (tuple, list, dict)):
qs = _urlencode(qs)
if qs:
qs = '?' + qs
if request.app:
if not path.startswith("/"):
# Append/remove trailing slash from path_info as needed
# (this is to support mistyped URL's without redirecting;
# if you want to redirect, use tools.trailing_slash).
pi = request.path_info
if request.is_index is True:
if not pi.endswith('/'):
pi = pi + '/'
elif request.is_index is False:
if pi.endswith('/') and pi != '/':
pi = pi[:-1]
if path == "":
path = pi
else:
path = _urljoin(pi, path)
if script_name is None:
script_name = request.script_name
if base is None:
base = request.base
newurl = base + script_name + path + qs
else:
# No request.app (we're being called outside a request).
# We'll have to guess the base from server.* attributes.
# This will produce very different results from the above
# if you're using vhosts or tools.proxy.
if base is None:
base = server.base()
path = (script_name or "") + path
newurl = base + path + qs
if './' in newurl:
# Normalize the URL by removing ./ and ../
atoms = []
for atom in newurl.split('/'):
if atom == '.':
pass
elif atom == '..':
atoms.pop()
else:
atoms.append(atom)
newurl = '/'.join(atoms)
# At this point, we should have a fully-qualified absolute URL.
if relative is None:
relative = getattr(request.app, "relative_urls", False)
# See http://www.ietf.org/rfc/rfc2396.txt
if relative == 'server':
# "A relative reference beginning with a single slash character is
# termed an absolute-path reference, as defined by <abs_path>..."
# This is also sometimes called "server-relative".
newurl = '/' + '/'.join(newurl.split('/', 3)[3:])
elif relative:
# "A relative reference that does not begin with a scheme name
# or a slash character is termed a relative-path reference."
old = url(relative=False).split('/')[:-1]
new = newurl.split('/')
while old and new:
a, b = old[0], new[0]
if a != b:
break
old.pop(0)
new.pop(0)
new = (['..'] * len(old)) + new
newurl = '/'.join(new)
return newurl
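# Illustrative behaviour (hedged; assumes a request for
# 'http://localhost:8080/path/to/page' with an empty script_name and
# request.is_index False):
#   url()                            -> 'http://localhost:8080/path/to/page'
#   url('sibling')                   -> 'http://localhost:8080/path/to/sibling'
#   url('/other')                    -> 'http://localhost:8080/other'
#   url('/other', relative='server') -> '/other'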
# import _cpconfig last so it can reference other top-level objects
from cherrypy import _cpconfig
# Use _global_conf_alias so quickstart can use 'config' as an arg
# without shadowing cherrypy.config.
config = _global_conf_alias = _cpconfig.Config()
config.defaults = {
'tools.log_tracebacks.on': True,
'tools.log_headers.on': True,
'tools.trailing_slash.on': True,
'tools.encode.on': True
}
config.namespaces["log"] = lambda k, v: setattr(log, k, v)
config.namespaces["checker"] = lambda k, v: setattr(checker, k, v)
# Must reset to get our defaults applied.
config.reset()
from cherrypy import _cpchecker
checker = _cpchecker.Checker()
engine.subscribe('start', checker)
|
dgwakeman/mne-python | refs/heads/master | mne/beamformer/__init__.py | 24 | """Beamformers for source localization
"""
from ._lcmv import lcmv, lcmv_epochs, lcmv_raw, tf_lcmv
from ._dics import dics, dics_epochs, dics_source_power, tf_dics
from ._rap_music import rap_music
|
fartashf/python-mode | refs/heads/develop | pymode/libs/logilab/common/configuration.py | 85 | # copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Classes to handle advanced configuration in simple to complex applications.
Allows loading the configuration from a file or from command-line
options, generating a sample configuration file, or displaying the
program's usage. Fills the gap between optik/optparse and ConfigParser
by adding data types (which are also available as a standalone optik
extension in the `optik_ext` module).
Quick start: simplest usage
---------------------------
.. python ::
>>> import sys
>>> from logilab.common.configuration import Configuration
>>> options = [('dothis', {'type':'yn', 'default': True, 'metavar': '<y or n>'}),
... ('value', {'type': 'string', 'metavar': '<string>'}),
... ('multiple', {'type': 'csv', 'default': ('yop',),
... 'metavar': '<comma separated values>',
... 'help': 'you can also document the option'}),
... ('number', {'type': 'int', 'default':2, 'metavar':'<int>'}),
... ]
>>> config = Configuration(options=options, name='My config')
>>> print config['dothis']
True
>>> print config['value']
None
>>> print config['multiple']
('yop',)
>>> print config['number']
2
>>> print config.help()
Usage: [options]
Options:
-h, --help show this help message and exit
--dothis=<y or n>
--value=<string>
--multiple=<comma separated values>
you can also document the option [current: none]
--number=<int>
>>> f = open('myconfig.ini', 'w')
>>> f.write('''[MY CONFIG]
... number = 3
... dothis = no
... multiple = 1,2,3
... ''')
>>> f.close()
>>> config.load_file_configuration('myconfig.ini')
>>> print config['dothis']
False
>>> print config['value']
None
>>> print config['multiple']
['1', '2', '3']
>>> print config['number']
3
>>> sys.argv = ['mon prog', '--value', 'bacon', '--multiple', '4,5,6',
... 'nonoptionargument']
>>> print config.load_command_line_configuration()
['nonoptionargument']
>>> print config['value']
bacon
>>> config.generate_config()
# class for simple configurations which don't need the
# manager / providers model and prefer delegation to inheritance
#
# configuration values are accessible through a dict like interface
#
[MY CONFIG]
dothis=no
value=bacon
# you can also document the option
multiple=4,5,6
number=3
Note : starting with Python 2.7 ConfigParser is able to take into
account the order of occurrences of the options into a file (by
using an OrderedDict). If you have two options changing some common
state, like a 'disable-all-stuff' and an 'enable-some-stuff-a', their
order of appearance is significant: the last one specified in the
file wins. For earlier versions of Python with logilab.common newer
than 0.61, the behaviour is unspecified.
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
__all__ = ('OptionsManagerMixIn', 'OptionsProviderMixIn',
'ConfigurationMixIn', 'Configuration',
'OptionsManager2ConfigurationAdapter')
import os
import sys
import re
from os.path import exists, expanduser
from copy import copy
from warnings import warn
from six import string_types, integer_types
from six.moves import range, configparser as cp, input
from logilab.common.compat import str_encode as _encode
from logilab.common.deprecation import deprecated
from logilab.common.textutils import normalize_text, unquote
from logilab.common import optik_ext
OptionError = optik_ext.OptionError
REQUIRED = []
class UnsupportedAction(Exception):
"""raised by set_option when it doesn't know what to do for an action"""
def _get_encoding(encoding, stream):
encoding = encoding or getattr(stream, 'encoding', None)
if not encoding:
import locale
encoding = locale.getpreferredencoding()
return encoding
# validation functions ########################################################
# validators will return the validated value or raise optparse.OptionValueError
# XXX add to documentation
def choice_validator(optdict, name, value):
"""validate and return a converted value for option of type 'choice'
"""
if not value in optdict['choices']:
msg = "option %s: invalid value: %r, should be in %s"
raise optik_ext.OptionValueError(msg % (name, value, optdict['choices']))
return value
def multiple_choice_validator(optdict, name, value):
"""validate and return a converted value for option of type 'choice'
"""
choices = optdict['choices']
values = optik_ext.check_csv(None, name, value)
for value in values:
if not value in choices:
msg = "option %s: invalid value: %r, should be in %s"
raise optik_ext.OptionValueError(msg % (name, value, choices))
return values
def csv_validator(optdict, name, value):
"""validate and return a converted value for option of type 'csv'
"""
return optik_ext.check_csv(None, name, value)
def yn_validator(optdict, name, value):
"""validate and return a converted value for option of type 'yn'
"""
return optik_ext.check_yn(None, name, value)
def named_validator(optdict, name, value):
"""validate and return a converted value for option of type 'named'
"""
return optik_ext.check_named(None, name, value)
def file_validator(optdict, name, value):
"""validate and return a filepath for option of type 'file'"""
return optik_ext.check_file(None, name, value)
def color_validator(optdict, name, value):
"""validate and return a valid color for option of type 'color'"""
return optik_ext.check_color(None, name, value)
def password_validator(optdict, name, value):
"""validate and return a string for option of type 'password'"""
return optik_ext.check_password(None, name, value)
def date_validator(optdict, name, value):
"""validate and return a mx DateTime object for option of type 'date'"""
return optik_ext.check_date(None, name, value)
def time_validator(optdict, name, value):
"""validate and return a time object for option of type 'time'"""
return optik_ext.check_time(None, name, value)
def bytes_validator(optdict, name, value):
"""validate and return an integer for option of type 'bytes'"""
return optik_ext.check_bytes(None, name, value)
VALIDATORS = {'string': unquote,
'int': int,
'float': float,
'file': file_validator,
'font': unquote,
'color': color_validator,
'regexp': re.compile,
'csv': csv_validator,
'yn': yn_validator,
'bool': yn_validator,
'named': named_validator,
'password': password_validator,
'date': date_validator,
'time': time_validator,
'bytes': bytes_validator,
'choice': choice_validator,
'multiple_choice': multiple_choice_validator,
}
def _call_validator(opttype, optdict, option, value):
if opttype not in VALIDATORS:
raise Exception('Unsupported type "%s"' % opttype)
try:
return VALIDATORS[opttype](optdict, option, value)
except TypeError:
try:
return VALIDATORS[opttype](value)
except optik_ext.OptionValueError:
raise
except:
raise optik_ext.OptionValueError('%s value (%r) should be of type %s' %
(option, value, opttype))
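# Illustrative calls (hedged; assumes the optik_ext checkers behave as
# their names suggest):
#   _call_validator('int', {}, 'number', '3') -> 3
#   _call_validator('csv', {}, 'multiple', 'a,b') -> ['a', 'b']
#   _call_validator('yn', {}, 'dothis', 'no') -> False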
# user input functions ########################################################
# user input functions will ask the user for input on stdin then validate
# the result and return the validated value or raise optparse.OptionValueError
# XXX add to documentation
def input_password(optdict, question='password:'):
from getpass import getpass
while True:
value = getpass(question)
value2 = getpass('confirm: ')
if value == value2:
return value
print('password mismatch, try again')
def input_string(optdict, question):
value = input(question).strip()
return value or None
def _make_input_function(opttype):
def input_validator(optdict, question):
while True:
value = input(question)
if not value.strip():
return None
try:
return _call_validator(opttype, optdict, None, value)
except optik_ext.OptionValueError as ex:
msg = str(ex).split(':', 1)[-1].strip()
print('bad value: %s' % msg)
return input_validator
INPUT_FUNCTIONS = {
'string': input_string,
'password': input_password,
}
for opttype in VALIDATORS.keys():
INPUT_FUNCTIONS.setdefault(opttype, _make_input_function(opttype))
# utility functions ############################################################
def expand_default(self, option):
"""monkey patch OptionParser.expand_default since we have a particular
way to handle defaults to avoid overriding values in the configuration
file
"""
if self.parser is None or not self.default_tag:
return option.help
optname = option._long_opts[0][2:]
try:
provider = self.parser.options_manager._all_options[optname]
except KeyError:
value = None
else:
optdict = provider.get_option_def(optname)
optname = provider.option_attrname(optname, optdict)
value = getattr(provider.config, optname, optdict)
value = format_option_value(optdict, value)
if value is optik_ext.NO_DEFAULT or not value:
value = self.NO_DEFAULT_VALUE
return option.help.replace(self.default_tag, str(value))
def _validate(value, optdict, name=''):
"""return a validated value for an option according to its type
optional argument name is only used for error message formatting
"""
try:
_type = optdict['type']
except KeyError:
# FIXME
return value
return _call_validator(_type, optdict, name, value)
convert = deprecated('[0.60] convert() was renamed _validate()')(_validate)
# format and output functions ##################################################
def comment(string):
"""return string as a comment"""
lines = [line.strip() for line in string.splitlines()]
return '# ' + ('%s# ' % os.linesep).join(lines)
def format_time(value):
if not value:
return '0'
if value != int(value):
return '%.2fs' % value
value = int(value)
nbmin, nbsec = divmod(value, 60)
if nbsec:
return '%ss' % value
nbhour, nbmin_ = divmod(nbmin, 60)
if nbmin_:
return '%smin' % nbmin
nbday, nbhour_ = divmod(nbhour, 24)
if nbhour_:
return '%sh' % nbhour
return '%sd' % nbday
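# Examples of the buckets above (illustrative): format_time(0.5) ->
# '0.50s', format_time(90) -> '90s', format_time(120) -> '2min',
# format_time(7200) -> '2h', format_time(86400) -> '1d'.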
def format_bytes(value):
if not value:
return '0'
if value != int(value):
return '%.2fB' % value
value = int(value)
prevunit = 'B'
for unit in ('KB', 'MB', 'GB', 'TB'):
next, remain = divmod(value, 1024)
if remain:
return '%s%s' % (value, prevunit)
prevunit = unit
value = next
return '%s%s' % (value, unit)
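# Examples (illustrative): format_bytes(1024) -> '1KB',
# format_bytes(1536) -> '1536B' (not an even multiple of 1024),
# format_bytes(1024**3) -> '1GB'.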
def format_option_value(optdict, value):
"""return the user input's value from a 'compiled' value"""
if isinstance(value, (list, tuple)):
value = ','.join(value)
elif isinstance(value, dict):
value = ','.join(['%s:%s' % (k, v) for k, v in value.items()])
elif hasattr(value, 'match'): # optdict.get('type') == 'regexp'
# compiled regexp
value = value.pattern
elif optdict.get('type') == 'yn':
value = value and 'yes' or 'no'
elif isinstance(value, string_types) and value.isspace():
value = "'%s'" % value
    elif optdict.get('type') == 'time' and isinstance(value, (float,) + integer_types):
value = format_time(value)
elif optdict.get('type') == 'bytes' and hasattr(value, '__int__'):
value = format_bytes(value)
return value
def ini_format_section(stream, section, options, encoding=None, doc=None):
"""format an options section using the INI format"""
encoding = _get_encoding(encoding, stream)
if doc:
print(_encode(comment(doc), encoding), file=stream)
print('[%s]' % section, file=stream)
ini_format(stream, options, encoding)
def ini_format(stream, options, encoding):
"""format options using the INI format"""
for optname, optdict, value in options:
value = format_option_value(optdict, value)
help = optdict.get('help')
if help:
help = normalize_text(help, line_len=79, indent='# ')
print(file=stream)
print(_encode(help, encoding), file=stream)
else:
print(file=stream)
if value is None:
print('#%s=' % optname, file=stream)
else:
value = _encode(value, encoding).strip()
print('%s=%s' % (optname, value), file=stream)
format_section = ini_format_section
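# For example (illustrative), ini_format_section(sys.stdout, 'MY APP',
# [('value', {'type': 'string', 'help': 'some help'}, 'bacon')])
# prints roughly:
#   [MY APP]
#
#   # some help
#   value=bacon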
def rest_format_section(stream, section, options, encoding=None, doc=None):
"""format an options section using as ReST formatted output"""
encoding = _get_encoding(encoding, stream)
if section:
print('%s\n%s' % (section, "'"*len(section)), file=stream)
if doc:
print(_encode(normalize_text(doc, line_len=79, indent=''), encoding), file=stream)
print(file=stream)
for optname, optdict, value in options:
help = optdict.get('help')
print(':%s:' % optname, file=stream)
if help:
help = normalize_text(help, line_len=79, indent=' ')
print(_encode(help, encoding), file=stream)
if value:
value = _encode(format_option_value(optdict, value), encoding)
print(file=stream)
print(' Default: ``%s``' % value.replace("`` ", "```` ``"), file=stream)
# Options Manager ##############################################################
class OptionsManagerMixIn(object):
"""MixIn to handle a configuration from both a configuration file and
command line options
"""
def __init__(self, usage, config_file=None, version=None, quiet=0):
self.config_file = config_file
self.reset_parsers(usage, version=version)
# list of registered options providers
self.options_providers = []
# dictionary associating option name to checker
self._all_options = {}
self._short_options = {}
self._nocallback_options = {}
self._mygroups = dict()
# verbosity
self.quiet = quiet
self._maxlevel = 0
def reset_parsers(self, usage='', version=None):
# configuration file parser
self.cfgfile_parser = cp.ConfigParser()
# command line parser
self.cmdline_parser = optik_ext.OptionParser(usage=usage, version=version)
self.cmdline_parser.options_manager = self
self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS)
def register_options_provider(self, provider, own_group=True):
"""register an options provider"""
        assert provider.priority <= 0, "provider's priority can't be > 0"
for i in range(len(self.options_providers)):
if provider.priority > self.options_providers[i].priority:
self.options_providers.insert(i, provider)
break
else:
self.options_providers.append(provider)
non_group_spec_options = [option for option in provider.options
if 'group' not in option[1]]
groups = getattr(provider, 'option_groups', ())
if own_group and non_group_spec_options:
self.add_option_group(provider.name.upper(), provider.__doc__,
non_group_spec_options, provider)
else:
for opt, optdict in non_group_spec_options:
self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
for gname, gdoc in groups:
gname = gname.upper()
goptions = [option for option in provider.options
if option[1].get('group', '').upper() == gname]
self.add_option_group(gname, gdoc, goptions, provider)
def add_option_group(self, group_name, doc, options, provider):
"""add an option group including the listed options
"""
assert options
# add option group to the command line parser
if group_name in self._mygroups:
group = self._mygroups[group_name]
else:
group = optik_ext.OptionGroup(self.cmdline_parser,
title=group_name.capitalize())
self.cmdline_parser.add_option_group(group)
group.level = provider.level
self._mygroups[group_name] = group
# add section to the config file
if group_name != "DEFAULT":
self.cfgfile_parser.add_section(group_name)
# add provider's specific options
for opt, optdict in options:
self.add_optik_option(provider, group, opt, optdict)
def add_optik_option(self, provider, optikcontainer, opt, optdict):
if 'inputlevel' in optdict:
warn('[0.50] "inputlevel" in option dictionary for %s is deprecated,'
' use "level"' % opt, DeprecationWarning)
optdict['level'] = optdict.pop('inputlevel')
args, optdict = self.optik_option(provider, opt, optdict)
option = optikcontainer.add_option(*args, **optdict)
self._all_options[opt] = provider
self._maxlevel = max(self._maxlevel, option.level or 0)
def optik_option(self, provider, opt, optdict):
"""get our personal option definition and return a suitable form for
use with optik/optparse
"""
optdict = copy(optdict)
others = {}
if 'action' in optdict:
self._nocallback_options[provider] = opt
else:
optdict['action'] = 'callback'
optdict['callback'] = self.cb_set_provider_option
# default is handled here and *must not* be given to optik if you
# want the whole machinery to work
if 'default' in optdict:
if ('help' in optdict
and optdict.get('default') is not None
and not optdict['action'] in ('store_true', 'store_false')):
optdict['help'] += ' [current: %default]'
del optdict['default']
args = ['--' + str(opt)]
if 'short' in optdict:
self._short_options[optdict['short']] = opt
args.append('-' + optdict['short'])
del optdict['short']
# cleanup option definition dict before giving it to optik
for key in list(optdict.keys()):
if not key in self._optik_option_attrs:
optdict.pop(key)
return args, optdict
def cb_set_provider_option(self, option, opt, value, parser):
"""optik callback for option setting"""
if opt.startswith('--'):
# remove -- on long option
opt = opt[2:]
else:
# short option, get its long equivalent
opt = self._short_options[opt[1:]]
# trick since we can't set action='store_true' on options
if value is None:
value = 1
self.global_set_option(opt, value)
def global_set_option(self, opt, value):
"""set option on the correct option provider"""
self._all_options[opt].set_option(opt, value)
def generate_config(self, stream=None, skipsections=(), encoding=None):
"""write a configuration file according to the current configuration
into the given stream or stdout
"""
options_by_section = {}
sections = []
for provider in self.options_providers:
for section, options in provider.options_by_section():
if section is None:
section = provider.name
if section in skipsections:
continue
options = [(n, d, v) for (n, d, v) in options
if d.get('type') is not None]
if not options:
continue
if not section in sections:
sections.append(section)
alloptions = options_by_section.setdefault(section, [])
alloptions += options
stream = stream or sys.stdout
encoding = _get_encoding(encoding, stream)
printed = False
for section in sections:
if printed:
print('\n', file=stream)
format_section(stream, section.upper(), options_by_section[section],
encoding)
printed = True
def generate_manpage(self, pkginfo, section=1, stream=None):
"""write a man page for the current configuration into the given
stream or stdout
"""
self._monkeypatch_expand_default()
try:
optik_ext.generate_manpage(self.cmdline_parser, pkginfo,
section, stream=stream or sys.stdout,
level=self._maxlevel)
finally:
self._unmonkeypatch_expand_default()
# initialization methods ##################################################
def load_provider_defaults(self):
"""initialize configuration using default values"""
for provider in self.options_providers:
provider.load_defaults()
def load_file_configuration(self, config_file=None):
"""load the configuration from file"""
self.read_config_file(config_file)
self.load_config_file()
def read_config_file(self, config_file=None):
"""read the configuration file but do not load it (i.e. dispatching
values to each options provider)
"""
helplevel = 1
while helplevel <= self._maxlevel:
opt = '-'.join(['long'] * helplevel) + '-help'
if opt in self._all_options:
break # already processed
def helpfunc(option, opt, val, p, level=helplevel):
print(self.help(level))
sys.exit(0)
helpmsg = '%s verbose help.' % ' '.join(['more'] * helplevel)
optdict = {'action' : 'callback', 'callback' : helpfunc,
'help' : helpmsg}
provider = self.options_providers[0]
self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
provider.options += ( (opt, optdict), )
helplevel += 1
if config_file is None:
config_file = self.config_file
if config_file is not None:
config_file = expanduser(config_file)
if config_file and exists(config_file):
parser = self.cfgfile_parser
parser.read([config_file])
            # normalize sections' titles
for sect, values in parser._sections.items():
if not sect.isupper() and values:
parser._sections[sect.upper()] = values
elif not self.quiet:
msg = 'No config file found, using default configuration'
print(msg, file=sys.stderr)
return
def input_config(self, onlysection=None, inputlevel=0, stream=None):
"""interactively get configuration values by asking to the user and generate
a configuration file
"""
if onlysection is not None:
onlysection = onlysection.upper()
for provider in self.options_providers:
for section, option, optdict in provider.all_options():
if onlysection is not None and section != onlysection:
continue
if not 'type' in optdict:
# ignore action without type (callback, store_true...)
continue
provider.input_option(option, optdict, inputlevel)
# now we can generate the configuration file
if stream is not None:
self.generate_config(stream)
def load_config_file(self):
"""dispatch values previously read from a configuration file to each
        options provider
"""
parser = self.cfgfile_parser
for section in parser.sections():
for option, value in parser.items(section):
try:
self.global_set_option(option, value)
except (KeyError, OptionError):
# TODO handle here undeclared options appearing in the config file
continue
def load_configuration(self, **kwargs):
"""override configuration according to given parameters
"""
for opt, opt_value in kwargs.items():
opt = opt.replace('_', '-')
provider = self._all_options[opt]
provider.set_option(opt, opt_value)
def load_command_line_configuration(self, args=None):
"""override configuration according to command line parameters
return additional arguments
"""
self._monkeypatch_expand_default()
try:
if args is None:
args = sys.argv[1:]
else:
args = list(args)
(options, args) = self.cmdline_parser.parse_args(args=args)
for provider in self._nocallback_options.keys():
config = provider.config
for attr in config.__dict__.keys():
value = getattr(options, attr, None)
if value is None:
continue
setattr(config, attr, value)
return args
finally:
self._unmonkeypatch_expand_default()
# help methods ############################################################
def add_help_section(self, title, description, level=0):
"""add a dummy option section for help purpose """
group = optik_ext.OptionGroup(self.cmdline_parser,
title=title.capitalize(),
description=description)
group.level = level
self._maxlevel = max(self._maxlevel, level)
self.cmdline_parser.add_option_group(group)
def _monkeypatch_expand_default(self):
# monkey patch optik_ext to deal with our default values
try:
self.__expand_default_backup = optik_ext.HelpFormatter.expand_default
optik_ext.HelpFormatter.expand_default = expand_default
except AttributeError:
# python < 2.4: nothing to be done
pass
def _unmonkeypatch_expand_default(self):
# remove monkey patch
if hasattr(optik_ext.HelpFormatter, 'expand_default'):
# unpatch optik_ext to avoid side effects
optik_ext.HelpFormatter.expand_default = self.__expand_default_backup
def help(self, level=0):
"""return the usage string for available options """
self.cmdline_parser.formatter.output_level = level
self._monkeypatch_expand_default()
try:
return self.cmdline_parser.format_help()
finally:
self._unmonkeypatch_expand_default()
class Method(object):
"""used to ease late binding of default method (so you can define options
on the class using default methods on the configuration instance)
"""
def __init__(self, methname):
self.method = methname
self._inst = None
def bind(self, instance):
"""bind the method to its instance"""
if self._inst is None:
self._inst = instance
def __call__(self, *args, **kwargs):
assert self._inst, 'unbound method'
return getattr(self._inst, self.method)(*args, **kwargs)
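# Hedged usage sketch: declaring
#   options = (('repo', {'type': 'string',
#                        'default': Method('default_repo')}),)
# on a ConfigurationMixIn subclass makes load_defaults() call
# self.default_repo() lazily, once the Method has been bound to the
# instance in OptionsProviderMixIn.__init__.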
# Options Provider #############################################################
class OptionsProviderMixIn(object):
"""Mixin to provide options to an OptionsManager"""
# those attributes should be overridden
priority = -1
name = 'default'
options = ()
level = 0
def __init__(self):
self.config = optik_ext.Values()
for option in self.options:
try:
option, optdict = option
except ValueError:
raise Exception('Bad option: %r' % option)
if isinstance(optdict.get('default'), Method):
optdict['default'].bind(self)
elif isinstance(optdict.get('callback'), Method):
optdict['callback'].bind(self)
self.load_defaults()
def load_defaults(self):
"""initialize the provider using default values"""
for opt, optdict in self.options:
action = optdict.get('action')
if action != 'callback':
                # callback actions have no default
default = self.option_default(opt, optdict)
if default is REQUIRED:
continue
self.set_option(opt, default, action, optdict)
def option_default(self, opt, optdict=None):
"""return the default value for an option"""
if optdict is None:
optdict = self.get_option_def(opt)
default = optdict.get('default')
if callable(default):
default = default()
return default
def option_attrname(self, opt, optdict=None):
"""get the config attribute corresponding to opt
"""
if optdict is None:
optdict = self.get_option_def(opt)
return optdict.get('dest', opt.replace('-', '_'))
option_name = deprecated('[0.60] OptionsProviderMixIn.option_name() was renamed to option_attrname()')(option_attrname)
def option_value(self, opt):
"""get the current value for the given option"""
return getattr(self.config, self.option_attrname(opt), None)
def set_option(self, opt, value, action=None, optdict=None):
"""method called to set an option (registered in the options list)
"""
if optdict is None:
optdict = self.get_option_def(opt)
if value is not None:
value = _validate(value, optdict, opt)
if action is None:
action = optdict.get('action', 'store')
if optdict.get('type') == 'named': # XXX need specific handling
optname = self.option_attrname(opt, optdict)
currentvalue = getattr(self.config, optname, None)
if currentvalue:
currentvalue.update(value)
value = currentvalue
if action == 'store':
setattr(self.config, self.option_attrname(opt, optdict), value)
elif action in ('store_true', 'count'):
setattr(self.config, self.option_attrname(opt, optdict), 0)
elif action == 'store_false':
setattr(self.config, self.option_attrname(opt, optdict), 1)
elif action == 'append':
opt = self.option_attrname(opt, optdict)
_list = getattr(self.config, opt, None)
if _list is None:
if isinstance(value, (list, tuple)):
_list = value
elif value is not None:
_list = []
_list.append(value)
setattr(self.config, opt, _list)
elif isinstance(_list, tuple):
setattr(self.config, opt, _list + (value,))
else:
_list.append(value)
elif action == 'callback':
optdict['callback'](None, opt, value, None)
else:
raise UnsupportedAction(action)
def input_option(self, option, optdict, inputlevel=99):
default = self.option_default(option, optdict)
if default is REQUIRED:
defaultstr = '(required): '
elif optdict.get('level', 0) > inputlevel:
return
elif optdict['type'] == 'password' or default is None:
defaultstr = ': '
else:
defaultstr = '(default: %s): ' % format_option_value(optdict, default)
print(':%s:' % option)
print(optdict.get('help') or option)
inputfunc = INPUT_FUNCTIONS[optdict['type']]
value = inputfunc(optdict, defaultstr)
while default is REQUIRED and not value:
print('please specify a value')
value = inputfunc(optdict, '%s: ' % option)
if value is None and default is not None:
value = default
self.set_option(option, value, optdict=optdict)
def get_option_def(self, opt):
"""return the dictionary defining an option given it's name"""
assert self.options
for option in self.options:
if option[0] == opt:
return option[1]
raise OptionError('no such option %s in section %r'
% (opt, self.name), opt)
def all_options(self):
"""return an iterator on available options for this provider
        options are actually described by a 3-tuple:
(section, option name, option dictionary)
"""
for section, options in self.options_by_section():
if section is None:
if self.name is None:
continue
section = self.name.upper()
for option, optiondict, value in options:
yield section, option, optiondict
def options_by_section(self):
"""return an iterator on options grouped by section
(section, [list of (optname, optdict, optvalue)])
"""
sections = {}
for optname, optdict in self.options:
sections.setdefault(optdict.get('group'), []).append(
(optname, optdict, self.option_value(optname)))
if None in sections:
yield None, sections.pop(None)
for section, options in sections.items():
yield section.upper(), options
def options_and_values(self, options=None):
if options is None:
options = self.options
for optname, optdict in options:
yield (optname, optdict, self.option_value(optname))
# configuration ################################################################
class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
"""basic mixin for simple configurations which don't need the
manager / providers model
"""
def __init__(self, *args, **kwargs):
if not args:
kwargs.setdefault('usage', '')
kwargs.setdefault('quiet', 1)
OptionsManagerMixIn.__init__(self, *args, **kwargs)
OptionsProviderMixIn.__init__(self)
if not getattr(self, 'option_groups', None):
self.option_groups = []
for option, optdict in self.options:
try:
gdef = (optdict['group'].upper(), '')
except KeyError:
continue
if not gdef in self.option_groups:
self.option_groups.append(gdef)
self.register_options_provider(self, own_group=False)
def register_options(self, options):
"""add some options to the configuration"""
options_by_group = {}
for optname, optdict in options:
options_by_group.setdefault(optdict.get('group', self.name.upper()), []).append((optname, optdict))
for group, options in options_by_group.items():
self.add_option_group(group, None, options, self)
self.options += tuple(options)
def load_defaults(self):
OptionsProviderMixIn.load_defaults(self)
def __iter__(self):
        return iter(self.config.__dict__.items())
def __getitem__(self, key):
try:
return getattr(self.config, self.option_attrname(key))
except (optik_ext.OptionValueError, AttributeError):
raise KeyError(key)
def __setitem__(self, key, value):
self.set_option(key, value)
def get(self, key, default=None):
try:
return getattr(self.config, self.option_attrname(key))
except (OptionError, AttributeError):
return default
class Configuration(ConfigurationMixIn):
"""class for simple configurations which don't need the
manager / providers model and prefer delegation to inheritance
configuration values are accessible through a dict like interface
"""
def __init__(self, config_file=None, options=None, name=None,
usage=None, doc=None, version=None):
if options is not None:
self.options = options
if name is not None:
self.name = name
if doc is not None:
self.__doc__ = doc
super(Configuration, self).__init__(config_file=config_file, usage=usage, version=version)
class OptionsManager2ConfigurationAdapter(object):
"""Adapt an option manager to behave like a
`logilab.common.configuration.Configuration` instance
"""
def __init__(self, provider):
self.config = provider
def __getattr__(self, key):
return getattr(self.config, key)
def __getitem__(self, key):
provider = self.config._all_options[key]
try:
return getattr(provider.config, provider.option_attrname(key))
except AttributeError:
raise KeyError(key)
def __setitem__(self, key, value):
self.config.global_set_option(self.config.option_attrname(key), value)
def get(self, key, default=None):
provider = self.config._all_options[key]
try:
return getattr(provider.config, provider.option_attrname(key))
except AttributeError:
return default
# other functions ##############################################################
def read_old_config(newconfig, changes, configfile):
"""initialize newconfig from a deprecated configuration file
possible changes:
* ('renamed', oldname, newname)
* ('moved', option, oldgroup, newgroup)
* ('typechanged', option, oldtype, newvalue)
"""
# build an index of changes
changesindex = {}
for action in changes:
if action[0] == 'moved':
option, oldgroup, newgroup = action[1:]
changesindex.setdefault(option, []).append((action[0], oldgroup, newgroup))
continue
if action[0] == 'renamed':
oldname, newname = action[1:]
changesindex.setdefault(newname, []).append((action[0], oldname))
continue
if action[0] == 'typechanged':
option, oldtype, newvalue = action[1:]
changesindex.setdefault(option, []).append((action[0], oldtype, newvalue))
continue
if action[1] in ('added', 'removed'):
continue # nothing to do here
raise Exception('unknown change %s' % action[0])
# build a config object able to read the old config
options = []
for optname, optdef in newconfig.options:
for action in changesindex.pop(optname, ()):
if action[0] == 'moved':
oldgroup, newgroup = action[1:]
optdef = optdef.copy()
optdef['group'] = oldgroup
elif action[0] == 'renamed':
optname = action[1]
elif action[0] == 'typechanged':
oldtype = action[1]
optdef = optdef.copy()
optdef['type'] = oldtype
options.append((optname, optdef))
if changesindex:
raise Exception('unapplied changes: %s' % changesindex)
oldconfig = Configuration(options=options, name=newconfig.name)
# read the old config
oldconfig.load_file_configuration(configfile)
# apply values reverting changes
changes.reverse()
done = set()
for action in changes:
if action[0] == 'renamed':
oldname, newname = action[1:]
newconfig[newname] = oldconfig[oldname]
done.add(newname)
elif action[0] == 'typechanged':
optname, oldtype, newvalue = action[1:]
newconfig[optname] = newvalue
done.add(optname)
for optname, optdef in newconfig.options:
if optdef.get('type') and not optname in done:
newconfig.set_option(optname, oldconfig[optname], optdict=optdef)
def merge_options(options, optgroup=None):
"""preprocess a list of options and remove duplicates, returning a new list
(tuple actually) of options.
Options dictionaries are copied to avoid later side-effect. Also, if
    `optgroup` argument is specified, ensure all options are in the given group.
"""
alloptions = {}
options = list(options)
for i in range(len(options)-1, -1, -1):
optname, optdict = options[i]
if optname in alloptions:
options.pop(i)
alloptions[optname].update(optdict)
else:
optdict = optdict.copy()
options[i] = (optname, optdict)
alloptions[optname] = optdict
if optgroup is not None:
alloptions[optname]['group'] = optgroup
return tuple(options)
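# Illustrative behaviour (hedged): duplicate option names collapse into
# a single entry whose dict merges both definitions:
#   merge_options([('opt', {'type': 'string'}),
#                  ('opt', {'type': 'string', 'default': 'x'})])
#   -> (('opt', {'type': 'string', 'default': 'x'}),)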
|
browseinfo/odoo_saas3_nicolas | refs/heads/master | addons/website_event/__openerp__.py | 68 | # -*- coding: utf-8 -*-
{
'name': 'Online Events',
'category': 'Website',
'summary': 'Schedule, Promote and Sell Events',
'version': '1.0',
'description': """
Online Events
""",
'author': 'OpenERP SA',
'depends': ['website', 'website_partner', 'website_mail', 'event'],
'data': [
'data/event_data.xml',
'views/website_event.xml',
'views/website_event_sale_backend.xml',
'security/ir.model.access.csv',
'security/website_event.xml',
],
'qweb': ['static/src/xml/*.xml'],
'demo': [
'data/event_demo.xml'
],
'installable': True,
'application': True,
}
|
brianrodri/oppia | refs/heads/develop | core/domain/rules_registry.py | 2 | # coding: utf-8
#
# Copyright 2020 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Registry for rules and their related specification files."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import json
import os
import feconf
import python_utils
import utils
class Registry(python_utils.OBJECT):
"""Registry of rules."""
_state_schema_version_to_html_field_types_to_rule_specs = {}
@classmethod
def get_html_field_types_to_rule_specs(cls, state_schema_version=None):
"""Returns a dict containing a html_field_types_to_rule_specs dict of
the specified state schema verison, if available.
Args:
state_schema_version: int|None. The state schema version to retrieve
the html_field_types_to_rule_specs for. If None, the current
state schema version's html_field_types_to_rule_specs will be
returned.
Returns:
dict. The html_field_types_to_rule_specs specs for the given state
schema version.
Raises:
Exception. No html_field_types_to_rule_specs json file found for the
given state schema version.
"""
cached = (
state_schema_version in
cls._state_schema_version_to_html_field_types_to_rule_specs)
if not cached and state_schema_version is None:
cls._state_schema_version_to_html_field_types_to_rule_specs[
state_schema_version] = json.loads(
utils.get_file_contents(
feconf.HTML_FIELD_TYPES_TO_RULE_SPECS_FILE_PATH)
)
elif not cached:
file_name = 'html_field_types_to_rule_specs_state_v%i.json' % (
state_schema_version)
spec_file = os.path.join(
feconf.LEGACY_HTML_FIELD_TYPES_TO_RULE_SPECS_FILE_PATH_FILE_DIR,
file_name)
try:
with python_utils.open_file(spec_file, 'r') as f:
specs_from_json = json.loads(f.read())
except:
raise Exception(
'No specs json file found for state schema v%i' %
state_schema_version)
cls._state_schema_version_to_html_field_types_to_rule_specs[
state_schema_version] = specs_from_json
return cls._state_schema_version_to_html_field_types_to_rule_specs[
state_schema_version]
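# Hedged usage sketch (the version number below is illustrative only):
#
#   current_specs = Registry.get_html_field_types_to_rule_specs()
#   legacy_specs = Registry.get_html_field_types_to_rule_specs(
#       state_schema_version=41)
#
# Both results are memoized in the class-level dict, so repeated lookups
# for the same schema version avoid re-reading the JSON files.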
|
skeeso/php-buildpack | refs/heads/master | extensions/composer/extension.py | 10 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Composer Extension
Downloads, installs and runs Composer.
"""
import os
import os.path
import sys
import logging
import re
import json
import StringIO
from build_pack_utils import utils
from build_pack_utils import stream_output
from extension_helpers import ExtensionHelper
_log = logging.getLogger('composer')
def find_composer_paths(path):
json_path = None
lock_path = None
for root, dirs, files in os.walk(path):
files.sort()
if 'vendor' in dirs:
dirs.remove('vendor')
contains_json = 'composer.json' in files
contains_lock = 'composer.lock' in files
if contains_json and contains_lock:
json_path = os.path.join(root, 'composer.json')
lock_path = os.path.join(root, 'composer.lock')
return (json_path, lock_path)
elif contains_json:
json_path = os.path.join(root, 'composer.json')
lock_path = None
elif contains_lock:
lock_path = os.path.join(root, 'composer.lock')
json_path = None
return (json_path, lock_path)
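# Illustrative walk (hypothetical tree): if /app contains both
# composer.json and composer.lock, the function returns
# ('/app/composer.json', '/app/composer.lock') from the first directory
# visited; 'vendor' directories are pruned from the search.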
class ComposerConfiguration(object):
def __init__(self, ctx):
self._ctx = ctx
self._log = _log
self._init_composer_paths()
def _init_composer_paths(self):
(self.json_path, self.lock_path) = \
find_composer_paths(self._ctx['BUILD_DIR'])
def read_exts_from_path(self, path):
exts = []
if path:
req_pat = re.compile(r'"require"\s?\:\s?\{(.*?)\}', re.DOTALL)
ext_pat = re.compile(r'"ext-(.*?)"')
with open(path, 'rt') as fp:
data = fp.read()
for req_match in req_pat.finditer(data):
for ext_match in ext_pat.finditer(req_match.group(1)):
exts.append(ext_match.group(1))
return exts
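    # For example (illustrative), a composer.json whose require block is
    #   "require": {"php": ">=5.4", "ext-gd": "*", "ext-mbstring": "*"}
    # yields ['gd', 'mbstring'].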
def read_version_from_composer_json(self, key):
composer_json = json.load(open(self.json_path, 'r'))
require = composer_json.get('require', {})
return require.get(key, None)
def read_version_from_composer_lock(self, key):
composer_json = json.load(open(self.lock_path, 'r'))
platform = composer_json.get('platform', {})
return platform.get(key, None)
def pick_php_version(self, requested):
selected = None
if requested is None:
selected = self._ctx['PHP_VERSION']
elif requested == '5.3.*' or requested == '>=5.3':
selected = self._ctx['PHP_54_LATEST']
elif requested == '5.4.*' or requested == '>=5.4':
selected = self._ctx['PHP_54_LATEST']
elif requested == '5.5.*' or requested == '>=5.5':
selected = self._ctx['PHP_55_LATEST']
elif requested == '5.6.*' or requested == '>=5.6':
selected = self._ctx['PHP_56_LATEST']
elif requested.startswith('5.4.'):
selected = requested
elif requested.startswith('5.5.'):
selected = requested
elif requested.startswith('5.6.'):
selected = requested
else:
selected = self._ctx['PHP_VERSION']
return selected
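    # Illustrative mapping (the selected values come from the buildpack
    # context): pick_php_version(None) keeps the default PHP_VERSION,
    # '5.5.*' resolves to PHP_55_LATEST, and an exact version such as
    # '5.6.3' passes through unchanged.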
def _read_version_from_composer(self, key):
if self.json_path:
return self.read_version_from_composer_json(key)
elif self.lock_path:
return self.read_version_from_composer_lock(key)
def configure(self):
if self.json_path or self.lock_path:
exts = []
# include any existing extensions
exts.extend(self._ctx.get('PHP_EXTENSIONS', []))
# add 'openssl' extension
exts.append('openssl')
# add platform extensions from composer.json & composer.lock
exts.extend(self.read_exts_from_path(self.json_path))
exts.extend(self.read_exts_from_path(self.lock_path))
hhvm_version = self._read_version_from_composer('hhvm')
if hhvm_version:
self._ctx['PHP_VM'] = 'hhvm'
self._log.debug('Composer picked HHVM Version [%s]',
hhvm_version)
else:
# update context with new list of extensions,
# if composer.json exists
php_version = self._read_version_from_composer('php')
self._log.debug('Composer picked PHP Version [%s]',
php_version)
self._ctx['PHP_VERSION'] = self.pick_php_version(php_version)
self._ctx['PHP_EXTENSIONS'] = utils.unique(exts)
self._ctx['PHP_VM'] = 'php'
class ComposerExtension(ExtensionHelper):
def __init__(self, ctx):
ExtensionHelper.__init__(self, ctx)
self._log = _log
def _defaults(self):
return {
'COMPOSER_VERSION': '1.0.0-alpha10',
'COMPOSER_PACKAGE': 'composer.phar',
'COMPOSER_DOWNLOAD_URL': '{DOWNLOAD_URL}/composer/'
'{COMPOSER_VERSION}/{COMPOSER_PACKAGE}',
'COMPOSER_INSTALL_OPTIONS': ['--no-interaction', '--no-dev'],
'COMPOSER_VENDOR_DIR': '{BUILD_DIR}/{LIBDIR}/vendor',
'COMPOSER_BIN_DIR': '{BUILD_DIR}/php/bin',
'COMPOSER_CACHE_DIR': '{CACHE_DIR}/composer'
}
def _should_compile(self):
(json_path, lock_path) = \
find_composer_paths(self._ctx['BUILD_DIR'])
return (json_path is not None or lock_path is not None)
def _compile(self, install):
self._builder = install.builder
self.composer_runner = ComposerCommandRunner(self._ctx, self._builder)
self.move_local_vendor_folder()
self.install()
self.run()
def move_local_vendor_folder(self):
vendor_path = os.path.join(self._ctx['BUILD_DIR'],
self._ctx['WEBDIR'],
'vendor')
if os.path.exists(vendor_path):
self._log.debug("Vendor [%s] exists, moving to LIBDIR",
vendor_path)
(self._builder.move()
.under('{BUILD_DIR}/{WEBDIR}')
.into('{BUILD_DIR}/{LIBDIR}')
.where_name_matches('^%s/.*$' % vendor_path)
.done())
def install(self):
self._builder.install().package('PHP').done()
if self._ctx['COMPOSER_VERSION'] == 'latest':
dependencies_path = os.path.join(self._ctx['BP_DIR'],
'dependencies')
if os.path.exists(dependencies_path):
raise RuntimeError('"COMPOSER_VERSION": "latest" ' \
'is not supported in the cached buildpack. Please vendor your preferred version of composer with your app, or use the provided default composer version.')
self._ctx['COMPOSER_DOWNLOAD_URL'] = \
'https://getcomposer.org/composer.phar'
self._builder.install()._installer.install_binary_direct(
self._ctx['COMPOSER_DOWNLOAD_URL'], None,
os.path.join(self._ctx['BUILD_DIR'], 'php', 'bin'),
extract=False)
else:
self._builder.install()._installer._install_binary_from_manifest(
self._ctx['COMPOSER_DOWNLOAD_URL'],
os.path.join(self._ctx['BUILD_DIR'], 'php', 'bin'),
extract=False)
def _github_oauth_token_is_valid(self, candidate_oauth_token):
stringio_writer = StringIO.StringIO()
curl_command = 'curl -H "Authorization: token %s" ' \
'https://api.github.com/rate_limit' % candidate_oauth_token
stream_output(stringio_writer,
curl_command,
env=os.environ,
cwd=self._ctx['BUILD_DIR'],
shell=True)
github_response = stringio_writer.getvalue()
github_response_json = json.loads(github_response)
return 'resources' in github_response_json
def _github_rate_exceeded(self, token_is_valid):
stringio_writer = StringIO.StringIO()
if token_is_valid:
candidate_oauth_token = os.getenv('COMPOSER_GITHUB_OAUTH_TOKEN')
curl_command = 'curl -H "Authorization: token %s" ' \
'https://api.github.com/rate_limit' % candidate_oauth_token
else:
curl_command = 'curl https://api.github.com/rate_limit'
stream_output(stringio_writer,
curl_command,
env=os.environ,
cwd=self._ctx['BUILD_DIR'],
shell=True)
github_response = stringio_writer.getvalue()
github_response_json = json.loads(github_response)
rate = github_response_json['rate']
num_remaining = rate['remaining']
return num_remaining <= 0
def setup_composer_github_token(self):
github_oauth_token = os.getenv('COMPOSER_GITHUB_OAUTH_TOKEN')
if self._github_oauth_token_is_valid(github_oauth_token):
print('-----> Using custom GitHub OAuth token in'
' $COMPOSER_GITHUB_OAUTH_TOKEN')
self.composer_runner.run('config', '-g',
'github-oauth.github.com',
'"%s"' % github_oauth_token)
return True
else:
print('-----> The GitHub OAuth token supplied from '
'$COMPOSER_GITHUB_OAUTH_TOKEN is invalid')
return False
def check_github_rate_exceeded(self, token_is_valid):
if self._github_rate_exceeded(token_is_valid):
print('-----> The GitHub api rate limit has been exceeded. '
'Composer will continue by downloading from source, which might result in slower downloads. '
'You can increase your rate limit with a GitHub OAuth token. '
'Please obtain a GitHub OAuth token by registering your application at '
'https://github.com/settings/applications/new. '
'Then set COMPOSER_GITHUB_OAUTH_TOKEN in your environment to the value of this token.')
def run(self):
# Move composer files out of WEBDIR
(self._builder.move()
.under('{BUILD_DIR}/{WEBDIR}')
.where_name_is('composer.json')
.into('BUILD_DIR')
.done())
(self._builder.move()
.under('{BUILD_DIR}/{WEBDIR}')
.where_name_is('composer.lock')
.into('BUILD_DIR')
.done())
# Sanity Checks
if not os.path.exists(os.path.join(self._ctx['BUILD_DIR'],
'composer.lock')):
msg = (
'PROTIP: Include a `composer.lock` file with your '
                'application! This will make sure the exact same versions '
                'of dependencies are used when you deploy to CloudFoundry.')
self._log.warning(msg)
print msg
# dump composer version, if in debug mode
if self._ctx.get('BP_DEBUG', False):
self.composer_runner.run('-V')
if not os.path.exists(os.path.join(self._ctx['BP_DIR'], 'dependencies')):
token_is_valid = False
# config composer to use github token, if provided
if os.getenv('COMPOSER_GITHUB_OAUTH_TOKEN', False):
token_is_valid = self.setup_composer_github_token()
# check that the api rate limit has not been exceeded, otherwise exit
self.check_github_rate_exceeded(token_is_valid)
# install dependencies w/Composer
self.composer_runner.run('install', '--no-progress',
*self._ctx['COMPOSER_INSTALL_OPTIONS'])
class ComposerCommandRunner(object):
def __init__(self, ctx, builder):
self._log = _log
self._ctx = ctx
self._strategy = HHVMComposerStrategy(ctx) \
if ctx['PHP_VM'] == 'hhvm' else PHPComposerStrategy(ctx)
self._php_path = self._strategy.binary_path()
self._composer_path = os.path.join(ctx['BUILD_DIR'], 'php',
'bin', 'composer.phar')
self._strategy.write_config(builder)
def _build_composer_environment(self):
env = {}
for key in os.environ.keys():
val = self._ctx.get(key, '')
env[key] = val if type(val) == str else json.dumps(val)
# add basic composer vars
env['COMPOSER_VENDOR_DIR'] = self._ctx['COMPOSER_VENDOR_DIR']
env['COMPOSER_BIN_DIR'] = self._ctx['COMPOSER_BIN_DIR']
env['COMPOSER_CACHE_DIR'] = self._ctx['COMPOSER_CACHE_DIR']
# prevent key system variables from being overridden
env['LD_LIBRARY_PATH'] = self._strategy.ld_library_path()
env['PHPRC'] = self._ctx['TMPDIR']
env['PATH'] = ':'.join(filter(None,
[env.get('PATH', ''),
os.path.dirname(self._php_path)]))
self._log.debug("ENV IS: %s",
'\n'.join(["%s=%s (%s)" % (key, val, type(val))
for (key, val) in env.iteritems()]))
return env
def run(self, *args):
try:
cmd = [self._php_path, self._composer_path]
cmd.extend(args)
self._log.debug("Running command [%s]", ' '.join(cmd))
stream_output(sys.stdout,
' '.join(cmd),
env=self._build_composer_environment(),
cwd=self._ctx['BUILD_DIR'],
shell=True)
except:
print "-----> Composer command failed"
raise
class HHVMComposerStrategy(object):
def __init__(self, ctx):
self._ctx = ctx
def binary_path(self):
return os.path.join(
self._ctx['BUILD_DIR'], 'hhvm', 'usr', 'bin', 'hhvm')
def write_config(self, builder):
pass
def ld_library_path(self):
return os.path.join(
self._ctx['BUILD_DIR'], 'hhvm', 'usr', 'lib', 'hhvm')
class PHPComposerStrategy(object):
def __init__(self, ctx):
self._ctx = ctx
def binary_path(self):
return os.path.join(
self._ctx['BUILD_DIR'], 'php', 'bin', 'php')
def write_config(self, builder):
# rewrite a temp copy of php.ini for use by composer
(builder.copy()
.under('{BUILD_DIR}/php/etc')
.where_name_is('php.ini')
.into('TMPDIR')
.done())
utils.rewrite_cfgs(os.path.join(self._ctx['TMPDIR'], 'php.ini'),
{'TMPDIR': self._ctx['TMPDIR'],
'HOME': self._ctx['BUILD_DIR']},
delim='@')
def ld_library_path(self):
return os.path.join(
self._ctx['BUILD_DIR'], 'php', 'lib')
# Extension Methods
def configure(ctx):
config = ComposerConfiguration(ctx)
config.configure()
def preprocess_commands(ctx):
composer = ComposerExtension(ctx)
return composer.preprocess_commands()
def service_commands(ctx):
composer = ComposerExtension(ctx)
return composer.service_commands()
def service_environment(ctx):
composer = ComposerExtension(ctx)
return composer.service_environment()
def compile(install):
composer = ComposerExtension(install.builder._ctx)
return composer.compile(install)
|
BrewPi/firmware | refs/heads/master | platform/spark/firmware/nanopb/nanopb/generator/nanopb_generator.py | 4 | #!/usr/bin/env python
'''Generate header file for nanopb from a ProtoBuf FileDescriptorSet.'''
from __future__ import unicode_literals
nanopb_version = "nanopb-0.3.9"
import sys
import re
from functools import reduce
try:
# Add some dummy imports to keep packaging tools happy.
import google, distutils.util # bbfreeze seems to need these
import pkg_resources # pyinstaller / protobuf 2.5 seem to need these
except:
# Don't care, we will error out later if it is actually important.
pass
try:
import google.protobuf.text_format as text_format
import google.protobuf.descriptor_pb2 as descriptor
except:
sys.stderr.write('''
*************************************************************
*** Could not import the Google protobuf Python libraries ***
*** Try installing package 'python-protobuf' or similar. ***
*************************************************************
''' + '\n')
raise
try:
import proto.nanopb_pb2 as nanopb_pb2
import proto.plugin_pb2 as plugin_pb2
except TypeError:
sys.stderr.write('''
****************************************************************************
*** Got TypeError when importing the protocol definitions for generator. ***
*** This usually means that the protoc in your path doesn't match the ***
*** Python protobuf library version. ***
*** ***
*** Please check the output of the following commands: ***
*** which protoc ***
*** protoc --version ***
*** python -c 'import google.protobuf; print(google.protobuf.__file__)' ***
****************************************************************************
''' + '\n')
raise
except:
sys.stderr.write('''
********************************************************************
*** Failed to import the protocol definitions for generator. ***
*** You have to run 'make' in the nanopb/generator/proto folder. ***
********************************************************************
''' + '\n')
raise
# ---------------------------------------------------------------------------
# Generation of single fields
# ---------------------------------------------------------------------------
import time
import os.path
# Values are tuple (c type, pb type, encoded size, int_size_allowed)
FieldD = descriptor.FieldDescriptorProto
datatypes = {
FieldD.TYPE_BOOL: ('bool', 'BOOL', 1, False),
FieldD.TYPE_DOUBLE: ('double', 'DOUBLE', 8, False),
FieldD.TYPE_FIXED32: ('uint32_t', 'FIXED32', 4, False),
FieldD.TYPE_FIXED64: ('uint64_t', 'FIXED64', 8, False),
FieldD.TYPE_FLOAT: ('float', 'FLOAT', 4, False),
FieldD.TYPE_INT32: ('int32_t', 'INT32', 10, True),
FieldD.TYPE_INT64: ('int64_t', 'INT64', 10, True),
FieldD.TYPE_SFIXED32: ('int32_t', 'SFIXED32', 4, False),
FieldD.TYPE_SFIXED64: ('int64_t', 'SFIXED64', 8, False),
FieldD.TYPE_SINT32: ('int32_t', 'SINT32', 5, True),
FieldD.TYPE_SINT64: ('int64_t', 'SINT64', 10, True),
FieldD.TYPE_UINT32: ('uint32_t', 'UINT32', 5, True),
FieldD.TYPE_UINT64: ('uint64_t', 'UINT64', 10, True)
}
# Integer size overrides (from .proto settings)
intsizes = {
nanopb_pb2.IS_8: 'int8_t',
nanopb_pb2.IS_16: 'int16_t',
nanopb_pb2.IS_32: 'int32_t',
nanopb_pb2.IS_64: 'int64_t',
}
# String types (for python 2 / python 3 compatibility)
try:
strtypes = (unicode, str)
except NameError:
strtypes = (str, )
class Names:
'''Keeps a set of nested names and formats them to C identifier.'''
def __init__(self, parts = ()):
if isinstance(parts, Names):
parts = parts.parts
self.parts = tuple(parts)
def __str__(self):
return '_'.join(self.parts)
def __add__(self, other):
if isinstance(other, strtypes):
return Names(self.parts + (other,))
elif isinstance(other, tuple):
return Names(self.parts + other)
else:
raise ValueError("Name parts should be of type str")
def __eq__(self, other):
return isinstance(other, Names) and self.parts == other.parts
def names_from_type_name(type_name):
'''Parse Names() from FieldDescriptorProto type_name'''
if type_name[0] != '.':
raise NotImplementedError("Lookup of non-absolute type names is not supported")
return Names(type_name[1:].split('.'))
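# Minimal illustration, in the style of the module-level asserts below:
assert str(Names(('foo', 'bar')) + 'baz') == 'foo_bar_baz'
assert names_from_type_name('.package.Message') == Names(('package', 'Message'))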
def varint_max_size(max_value):
'''Returns the maximum number of bytes a varint can take when encoded.'''
if max_value < 0:
        max_value = 2**64 + max_value  # two's complement encoding of negatives
for i in range(1, 11):
if (max_value >> (i * 7)) == 0:
return i
raise ValueError("Value too large for varint: " + str(max_value))
assert varint_max_size(-1) == 10
assert varint_max_size(0) == 1
assert varint_max_size(127) == 1
assert varint_max_size(128) == 2
class EncodedSize:
'''Class used to represent the encoded size of a field or a message.
Consists of a combination of symbolic sizes and integer sizes.'''
def __init__(self, value = 0, symbols = []):
if isinstance(value, EncodedSize):
self.value = value.value
self.symbols = value.symbols
elif isinstance(value, strtypes + (Names,)):
self.symbols = [str(value)]
self.value = 0
else:
self.value = value
self.symbols = symbols
def __add__(self, other):
if isinstance(other, int):
return EncodedSize(self.value + other, self.symbols)
elif isinstance(other, strtypes + (Names,)):
return EncodedSize(self.value, self.symbols + [str(other)])
elif isinstance(other, EncodedSize):
return EncodedSize(self.value + other.value, self.symbols + other.symbols)
else:
raise ValueError("Cannot add size: " + repr(other))
def __mul__(self, other):
if isinstance(other, int):
return EncodedSize(self.value * other, [str(other) + '*' + s for s in self.symbols])
else:
raise ValueError("Cannot multiply size: " + repr(other))
def __str__(self):
if not self.symbols:
return str(self.value)
else:
return '(' + str(self.value) + ' + ' + ' + '.join(self.symbols) + ')'
def upperlimit(self):
if not self.symbols:
return self.value
else:
return 2**32 - 1
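# Illustrative checks for EncodedSize arithmetic: integer parts add up, while
# symbolic parts (e.g. a submessage's #defined size) are carried along.
assert str(EncodedSize(5) + 3) == '8'
assert str(EncodedSize(4) + 'FooMessage_size') == '(4 + FooMessage_size)'
assert (EncodedSize(2, ['FooMessage_size']) * 3).symbols == ['3*FooMessage_size']
assert EncodedSize(7).upperlimit() == 7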
class Enum:
def __init__(self, names, desc, enum_options):
'''desc is EnumDescriptorProto'''
self.options = enum_options
self.names = names + desc.name
if enum_options.long_names:
self.values = [(self.names + x.name, x.number) for x in desc.value]
else:
self.values = [(names + x.name, x.number) for x in desc.value]
self.value_longnames = [self.names + x.name for x in desc.value]
self.packed = enum_options.packed_enum
def has_negative(self):
for n, v in self.values:
if v < 0:
return True
return False
def encoded_size(self):
return max([varint_max_size(v) for n,v in self.values])
def __str__(self):
result = 'typedef enum _%s {\n' % self.names
result += ',\n'.join([" %s = %d" % x for x in self.values])
result += '\n}'
if self.packed:
result += ' pb_packed'
result += ' %s;' % self.names
result += '\n#define _%s_MIN %s' % (self.names, self.values[0][0])
result += '\n#define _%s_MAX %s' % (self.names, self.values[-1][0])
result += '\n#define _%s_ARRAYSIZE ((%s)(%s+1))' % (self.names, self.names, self.values[-1][0])
if not self.options.long_names:
# Define the long names always so that enum value references
# from other files work properly.
for i, x in enumerate(self.values):
result += '\n#define %s %s' % (self.value_longnames[i], x[0])
if self.options.enum_to_string:
result += '\nconst char *%s_name(%s v);\n' % (self.names, self.names)
return result
def enum_to_string_definition(self):
if not self.options.enum_to_string:
return ""
result = 'const char *%s_name(%s v) {\n' % (self.names, self.names)
result += ' switch (v) {\n'
for ((enumname, _), strname) in zip(self.values, self.value_longnames):
# Strip off the leading type name from the string value.
strval = str(strname)[len(str(self.names)) + 1:]
result += ' case %s: return "%s";\n' % (enumname, strval)
result += ' }\n'
result += ' return "unknown";\n'
result += '}\n'
return result
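# Sketch of the C emitted by Enum.__str__ for a hypothetical two-value enum
# "Foo" with long_names enabled (derived from the format strings above):
#   typedef enum _Foo {
#       Foo_A = 0,
#       Foo_B = 1
#   } Foo;
#   #define _Foo_MIN Foo_A
#   #define _Foo_MAX Foo_B
#   #define _Foo_ARRAYSIZE ((Foo)(Foo_B+1))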
class FieldMaxSize:
def __init__(self, worst = 0, checks = [], field_name = 'undefined'):
if isinstance(worst, list):
self.worst = max(i for i in worst if i is not None)
else:
self.worst = worst
self.worst_field = field_name
self.checks = list(checks)
def extend(self, extend, field_name = None):
self.worst = max(self.worst, extend.worst)
if self.worst == extend.worst:
self.worst_field = extend.worst_field
self.checks.extend(extend.checks)
class Field:
def __init__(self, struct_name, desc, field_options):
'''desc is FieldDescriptorProto'''
self.tag = desc.number
self.struct_name = struct_name
self.union_name = None
self.name = desc.name
self.default = None
self.max_size = None
self.max_count = None
self.array_decl = ""
self.enc_size = None
self.ctype = None
if field_options.type == nanopb_pb2.FT_INLINE:
# Before nanopb-0.3.8, fixed length bytes arrays were specified
# by setting type to FT_INLINE. But to handle pointer typed fields,
# it makes sense to have it as a separate option.
field_options.type = nanopb_pb2.FT_STATIC
field_options.fixed_length = True
# Parse field options
if field_options.HasField("max_size"):
self.max_size = field_options.max_size
if desc.type == FieldD.TYPE_STRING and field_options.HasField("max_length"):
# max_length overrides max_size for strings
self.max_size = field_options.max_length + 1
if field_options.HasField("max_count"):
self.max_count = field_options.max_count
if desc.HasField('default_value'):
self.default = desc.default_value
# Check field rules, i.e. required/optional/repeated.
can_be_static = True
if desc.label == FieldD.LABEL_REPEATED:
self.rules = 'REPEATED'
if self.max_count is None:
can_be_static = False
else:
self.array_decl = '[%d]' % self.max_count
elif field_options.proto3:
self.rules = 'SINGULAR'
elif desc.label == FieldD.LABEL_REQUIRED:
self.rules = 'REQUIRED'
elif desc.label == FieldD.LABEL_OPTIONAL:
self.rules = 'OPTIONAL'
else:
raise NotImplementedError(desc.label)
# Check if the field can be implemented with static allocation
# i.e. whether the data size is known.
if desc.type == FieldD.TYPE_STRING and self.max_size is None:
can_be_static = False
if desc.type == FieldD.TYPE_BYTES and self.max_size is None:
can_be_static = False
# Decide how the field data will be allocated
if field_options.type == nanopb_pb2.FT_DEFAULT:
if can_be_static:
field_options.type = nanopb_pb2.FT_STATIC
else:
field_options.type = nanopb_pb2.FT_CALLBACK
if field_options.type == nanopb_pb2.FT_STATIC and not can_be_static:
raise Exception("Field '%s' is defined as static, but max_size or "
"max_count is not given." % self.name)
if field_options.type == nanopb_pb2.FT_STATIC:
self.allocation = 'STATIC'
elif field_options.type == nanopb_pb2.FT_POINTER:
self.allocation = 'POINTER'
elif field_options.type == nanopb_pb2.FT_CALLBACK:
self.allocation = 'CALLBACK'
else:
raise NotImplementedError(field_options.type)
# Decide the C data type to use in the struct.
if desc.type in datatypes:
self.ctype, self.pbtype, self.enc_size, isa = datatypes[desc.type]
# Override the field size if user wants to use smaller integers
if isa and field_options.int_size != nanopb_pb2.IS_DEFAULT:
self.ctype = intsizes[field_options.int_size]
if desc.type == FieldD.TYPE_UINT32 or desc.type == FieldD.TYPE_UINT64:
                    self.ctype = 'u' + self.ctype
elif desc.type == FieldD.TYPE_ENUM:
self.pbtype = 'ENUM'
self.ctype = names_from_type_name(desc.type_name)
if self.default is not None:
self.default = self.ctype + self.default
self.enc_size = None # Needs to be filled in when enum values are known
elif desc.type == FieldD.TYPE_STRING:
self.pbtype = 'STRING'
self.ctype = 'char'
if self.allocation == 'STATIC':
self.array_decl += '[%d]' % self.max_size
self.enc_size = varint_max_size(self.max_size) + self.max_size
elif desc.type == FieldD.TYPE_BYTES:
if field_options.fixed_length:
self.pbtype = 'FIXED_LENGTH_BYTES'
if self.max_size is None:
raise Exception("Field '%s' is defined as fixed length, "
"but max_size is not given." % self.name)
self.enc_size = varint_max_size(self.max_size) + self.max_size
self.ctype = 'pb_byte_t'
self.array_decl += '[%d]' % self.max_size
else:
self.pbtype = 'BYTES'
self.ctype = 'pb_bytes_array_t'
if self.allocation == 'STATIC':
self.ctype = self.struct_name + self.name + 't'
self.enc_size = varint_max_size(self.max_size) + self.max_size
elif desc.type == FieldD.TYPE_MESSAGE:
self.pbtype = 'MESSAGE'
self.ctype = self.submsgname = names_from_type_name(desc.type_name)
self.enc_size = None # Needs to be filled in after the message type is available
else:
raise NotImplementedError(desc.type)
def __lt__(self, other):
return self.tag < other.tag
def __str__(self):
result = ''
if self.allocation == 'POINTER':
if self.rules == 'REPEATED':
result += ' pb_size_t ' + self.name + '_count;\n'
if self.pbtype == 'MESSAGE':
# Use struct definition, so recursive submessages are possible
result += ' struct _%s *%s;' % (self.ctype, self.name)
elif self.pbtype == 'FIXED_LENGTH_BYTES':
# Pointer to fixed size array
result += ' %s (*%s)%s;' % (self.ctype, self.name, self.array_decl)
elif self.rules == 'REPEATED' and self.pbtype in ['STRING', 'BYTES']:
# String/bytes arrays need to be defined as pointers to pointers
result += ' %s **%s;' % (self.ctype, self.name)
else:
result += ' %s *%s;' % (self.ctype, self.name)
elif self.allocation == 'CALLBACK':
result += ' pb_callback_t %s;' % self.name
else:
if self.rules == 'OPTIONAL' and self.allocation == 'STATIC':
result += ' bool has_' + self.name + ';\n'
elif self.rules == 'REPEATED' and self.allocation == 'STATIC':
result += ' pb_size_t ' + self.name + '_count;\n'
result += ' %s %s%s;' % (self.ctype, self.name, self.array_decl)
return result
def types(self):
'''Return definitions for any special types this field might need.'''
if self.pbtype == 'BYTES' and self.allocation == 'STATIC':
result = 'typedef PB_BYTES_ARRAY_T(%d) %s;\n' % (self.max_size, self.ctype)
else:
result = ''
return result
def get_dependencies(self):
'''Get list of type names used by this field.'''
if self.allocation == 'STATIC':
return [str(self.ctype)]
else:
return []
def get_initializer(self, null_init, inner_init_only = False):
'''Return literal expression for this field's default value.
null_init: If True, initialize to a 0 value instead of default from .proto
inner_init_only: If True, exclude initialization for any count/has fields
'''
inner_init = None
if self.pbtype == 'MESSAGE':
if null_init:
inner_init = '%s_init_zero' % self.ctype
else:
inner_init = '%s_init_default' % self.ctype
elif self.default is None or null_init:
if self.pbtype == 'STRING':
inner_init = '""'
elif self.pbtype == 'BYTES':
inner_init = '{0, {0}}'
elif self.pbtype == 'FIXED_LENGTH_BYTES':
inner_init = '{0}'
elif self.pbtype in ('ENUM', 'UENUM'):
inner_init = '(%s)0' % self.ctype
else:
inner_init = '0'
else:
if self.pbtype == 'STRING':
inner_init = self.default.replace('"', '\\"')
inner_init = '"' + inner_init + '"'
elif self.pbtype == 'BYTES':
data = ['0x%02x' % ord(c) for c in self.default]
if len(data) == 0:
inner_init = '{0, {0}}'
else:
inner_init = '{%d, {%s}}' % (len(data), ','.join(data))
elif self.pbtype == 'FIXED_LENGTH_BYTES':
data = ['0x%02x' % ord(c) for c in self.default]
if len(data) == 0:
inner_init = '{0}'
else:
inner_init = '{%s}' % ','.join(data)
elif self.pbtype in ['FIXED32', 'UINT32']:
inner_init = str(self.default) + 'u'
elif self.pbtype in ['FIXED64', 'UINT64']:
inner_init = str(self.default) + 'ull'
elif self.pbtype in ['SFIXED64', 'INT64']:
inner_init = str(self.default) + 'll'
else:
inner_init = str(self.default)
if inner_init_only:
return inner_init
outer_init = None
if self.allocation == 'STATIC':
if self.rules == 'REPEATED':
outer_init = '0, {'
outer_init += ', '.join([inner_init] * self.max_count)
outer_init += '}'
elif self.rules == 'OPTIONAL':
outer_init = 'false, ' + inner_init
else:
outer_init = inner_init
elif self.allocation == 'POINTER':
if self.rules == 'REPEATED':
outer_init = '0, NULL'
else:
outer_init = 'NULL'
elif self.allocation == 'CALLBACK':
if self.pbtype == 'EXTENSION':
outer_init = 'NULL'
else:
outer_init = '{{NULL}, NULL}'
return outer_init
def default_decl(self, declaration_only = False):
'''Return definition for this field's default value.'''
if self.default is None:
return None
ctype = self.ctype
default = self.get_initializer(False, True)
array_decl = ''
if self.pbtype == 'STRING':
if self.allocation != 'STATIC':
return None # Not implemented
array_decl = '[%d]' % self.max_size
elif self.pbtype == 'BYTES':
if self.allocation != 'STATIC':
return None # Not implemented
elif self.pbtype == 'FIXED_LENGTH_BYTES':
if self.allocation != 'STATIC':
return None # Not implemented
array_decl = '[%d]' % self.max_size
if declaration_only:
return 'extern const %s %s_default%s;' % (ctype, self.struct_name + self.name, array_decl)
else:
return 'const %s %s_default%s = %s;' % (ctype, self.struct_name + self.name, array_decl, default)
def tags(self):
'''Return the #define for the tag number of this field.'''
identifier = '%s_%s_tag' % (self.struct_name, self.name)
return '#define %-40s %d\n' % (identifier, self.tag)
def pb_field_t(self, prev_field_name, union_index = None):
'''Return the pb_field_t initializer to use in the constant array.
prev_field_name is the name of the previous field or None. For OneOf
unions, union_index is the index of this field inside the OneOf.
'''
if self.rules == 'ONEOF':
if self.anonymous:
result = ' PB_ANONYMOUS_ONEOF_FIELD(%s, ' % self.union_name
else:
result = ' PB_ONEOF_FIELD(%s, ' % self.union_name
else:
result = ' PB_FIELD('
result += '%3d, ' % self.tag
result += '%-8s, ' % self.pbtype
result += '%s, ' % self.rules
result += '%-8s, ' % self.allocation
if union_index is not None and union_index > 0:
result += 'UNION, '
elif prev_field_name is None:
result += 'FIRST, '
else:
result += 'OTHER, '
result += '%s, ' % self.struct_name
result += '%s, ' % self.name
result += '%s, ' % (prev_field_name or self.name)
if self.pbtype == 'MESSAGE':
result += '&%s_fields)' % self.submsgname
elif self.default is None:
result += '0)'
elif self.pbtype in ['BYTES', 'STRING', 'FIXED_LENGTH_BYTES'] and self.allocation != 'STATIC':
result += '0)' # Arbitrary size default values not implemented
elif self.rules == 'OPTEXT':
result += '0)' # Default value for extensions is not implemented
else:
result += '&%s_default)' % (self.struct_name + self.name)
return result
def get_last_field_name(self):
return self.name
def largest_field_value(self):
'''Determine if this field needs 16bit or 32bit pb_field_t structure to compile properly.
Returns numeric value or a C-expression for assert.'''
check = []
if self.pbtype == 'MESSAGE' and self.allocation == 'STATIC':
if self.rules == 'REPEATED':
check.append('pb_membersize(%s, %s[0])' % (self.struct_name, self.name))
elif self.rules == 'ONEOF':
if self.anonymous:
check.append('pb_membersize(%s, %s)' % (self.struct_name, self.name))
else:
check.append('pb_membersize(%s, %s.%s)' % (self.struct_name, self.union_name, self.name))
else:
check.append('pb_membersize(%s, %s)' % (self.struct_name, self.name))
elif self.pbtype == 'BYTES' and self.allocation == 'STATIC':
if self.max_size > 251:
check.append('pb_membersize(%s, %s)' % (self.struct_name, self.name))
return FieldMaxSize([self.tag, self.max_size, self.max_count],
check,
('%s.%s' % (self.struct_name, self.name)))
def encoded_size(self, dependencies):
'''Return the maximum size that this field can take when encoded,
including the field tag. If the size cannot be determined, returns
None.'''
if self.allocation != 'STATIC':
return None
if self.pbtype == 'MESSAGE':
encsize = None
if str(self.submsgname) in dependencies:
submsg = dependencies[str(self.submsgname)]
encsize = submsg.encoded_size(dependencies)
if encsize is not None:
# Include submessage length prefix
encsize += varint_max_size(encsize.upperlimit())
if encsize is None:
# Submessage or its size cannot be found.
                # This can occur if the submessage is defined in a different
                # file, and it or its .options file could not be found.
# Instead of direct numeric value, reference the size that
# has been #defined in the other file.
encsize = EncodedSize(self.submsgname + 'size')
# We will have to make a conservative assumption on the length
# prefix size, though.
encsize += 5
elif self.pbtype in ['ENUM', 'UENUM']:
if str(self.ctype) in dependencies:
enumtype = dependencies[str(self.ctype)]
encsize = enumtype.encoded_size()
else:
# Conservative assumption
encsize = 10
elif self.enc_size is None:
raise RuntimeError("Could not determine encoded size for %s.%s"
% (self.struct_name, self.name))
else:
encsize = EncodedSize(self.enc_size)
encsize += varint_max_size(self.tag << 3) # Tag + wire type
if self.rules == 'REPEATED':
            # Decoders must always be able to handle unpacked arrays, so we
            # have to reserve space for that encoding even though we emit
            # packed arrays ourselves. For a length of 1, however, the packed
            # encoding is larger, so we add an allowance for the length byte.
encsize *= self.max_count
if self.max_count == 1:
encsize += 1
return encsize
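# Worked example (illustrative): a repeated SFIXED32 field with tag 1 and
# max_count 2 has enc_size 4 and a 1-byte tag, so encoded_size() yields
# (4 + 1) * 2 = 10 bytes for the unpacked worst case.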
class ExtensionRange(Field):
def __init__(self, struct_name, range_start, field_options):
'''Implements a special pb_extension_t* field in an extensible message
structure. The range_start signifies the index at which the extensions
        start. Not all tags above this index are necessarily extensions; the
        range is merely a speed optimization.
'''
self.tag = range_start
self.struct_name = struct_name
self.name = 'extensions'
self.pbtype = 'EXTENSION'
self.rules = 'OPTIONAL'
self.allocation = 'CALLBACK'
self.ctype = 'pb_extension_t'
self.array_decl = ''
self.default = None
self.max_size = 0
self.max_count = 0
def __str__(self):
return ' pb_extension_t *extensions;'
def types(self):
return ''
def tags(self):
return ''
def encoded_size(self, dependencies):
        # We exclude extensions from the count because they cannot be known
        # until runtime. Another option would be to return None here, but this
        # way the value remains useful if extensions are not used.
return EncodedSize(0)
class ExtensionField(Field):
def __init__(self, struct_name, desc, field_options):
self.fullname = struct_name + desc.name
self.extendee_name = names_from_type_name(desc.extendee)
Field.__init__(self, self.fullname + 'struct', desc, field_options)
if self.rules != 'OPTIONAL':
self.skip = True
else:
self.skip = False
self.rules = 'OPTEXT'
def tags(self):
'''Return the #define for the tag number of this field.'''
identifier = '%s_tag' % self.fullname
return '#define %-40s %d\n' % (identifier, self.tag)
def extension_decl(self):
'''Declaration of the extension type in the .pb.h file'''
if self.skip:
msg = '/* Extension field %s was skipped because only "optional"\n' % self.fullname
            msg += ' type of extension fields is currently supported. */\n'
return msg
return ('extern const pb_extension_type_t %s; /* field type: %s */\n' %
(self.fullname, str(self).strip()))
def extension_def(self):
'''Definition of the extension type in the .pb.c file'''
if self.skip:
return ''
result = 'typedef struct {\n'
result += str(self)
result += '\n} %s;\n\n' % self.struct_name
result += ('static const pb_field_t %s_field = \n %s;\n\n' %
(self.fullname, self.pb_field_t(None)))
result += 'const pb_extension_type_t %s = {\n' % self.fullname
result += ' NULL,\n'
result += ' NULL,\n'
result += ' &%s_field\n' % self.fullname
result += '};\n'
return result
# ---------------------------------------------------------------------------
# Generation of oneofs (unions)
# ---------------------------------------------------------------------------
class OneOf(Field):
def __init__(self, struct_name, oneof_desc):
self.struct_name = struct_name
self.name = oneof_desc.name
self.ctype = 'union'
self.pbtype = 'oneof'
self.fields = []
self.allocation = 'ONEOF'
self.default = None
self.rules = 'ONEOF'
self.anonymous = False
def add_field(self, field):
if field.allocation == 'CALLBACK':
raise Exception("Callback fields inside of oneof are not supported"
+ " (field %s)" % field.name)
field.union_name = self.name
field.rules = 'ONEOF'
field.anonymous = self.anonymous
self.fields.append(field)
self.fields.sort(key = lambda f: f.tag)
# Sort by the lowest tag number inside union
self.tag = min([f.tag for f in self.fields])
def __str__(self):
result = ''
if self.fields:
result += ' pb_size_t which_' + self.name + ";\n"
result += ' union {\n'
for f in self.fields:
result += ' ' + str(f).replace('\n', '\n ') + '\n'
if self.anonymous:
result += ' };'
else:
result += ' } ' + self.name + ';'
return result
def types(self):
return ''.join([f.types() for f in self.fields])
def get_dependencies(self):
deps = []
for f in self.fields:
deps += f.get_dependencies()
return deps
def get_initializer(self, null_init):
return '0, {' + self.fields[0].get_initializer(null_init) + '}'
def default_decl(self, declaration_only = False):
return None
def tags(self):
return ''.join([f.tags() for f in self.fields])
def pb_field_t(self, prev_field_name):
parts = []
for union_index, field in enumerate(self.fields):
parts.append(field.pb_field_t(prev_field_name, union_index))
return ',\n'.join(parts)
def get_last_field_name(self):
if self.anonymous:
return self.fields[-1].name
else:
return self.name + '.' + self.fields[-1].name
def largest_field_value(self):
largest = FieldMaxSize()
for f in self.fields:
largest.extend(f.largest_field_value())
return largest
def encoded_size(self, dependencies):
'''Returns the size of the largest oneof field.'''
largest = EncodedSize(0)
for f in self.fields:
size = EncodedSize(f.encoded_size(dependencies))
if size.value is None:
return None
elif size.symbols:
return None # Cannot resolve maximum of symbols
elif size.value > largest.value:
largest = size
return largest
# ---------------------------------------------------------------------------
# Generation of messages (structures)
# ---------------------------------------------------------------------------
class Message:
def __init__(self, names, desc, message_options):
self.name = names
self.fields = []
self.oneofs = {}
no_unions = []
if message_options.msgid:
self.msgid = message_options.msgid
if hasattr(desc, 'oneof_decl'):
for i, f in enumerate(desc.oneof_decl):
oneof_options = get_nanopb_suboptions(desc, message_options, self.name + f.name)
if oneof_options.no_unions:
no_unions.append(i) # No union, but add fields normally
elif oneof_options.type == nanopb_pb2.FT_IGNORE:
pass # No union and skip fields also
else:
oneof = OneOf(self.name, f)
if oneof_options.anonymous_oneof:
oneof.anonymous = True
self.oneofs[i] = oneof
self.fields.append(oneof)
for f in desc.field:
field_options = get_nanopb_suboptions(f, message_options, self.name + f.name)
if field_options.type == nanopb_pb2.FT_IGNORE:
continue
field = Field(self.name, f, field_options)
if (hasattr(f, 'oneof_index') and
f.HasField('oneof_index') and
f.oneof_index not in no_unions):
if f.oneof_index in self.oneofs:
self.oneofs[f.oneof_index].add_field(field)
else:
self.fields.append(field)
if len(desc.extension_range) > 0:
field_options = get_nanopb_suboptions(desc, message_options, self.name + 'extensions')
range_start = min([r.start for r in desc.extension_range])
if field_options.type != nanopb_pb2.FT_IGNORE:
self.fields.append(ExtensionRange(self.name, range_start, field_options))
self.packed = message_options.packed_struct
self.ordered_fields = self.fields[:]
self.ordered_fields.sort()
def get_dependencies(self):
'''Get list of type names that this structure refers to.'''
deps = []
for f in self.fields:
deps += f.get_dependencies()
return deps
def __str__(self):
result = 'typedef struct _%s {\n' % self.name
if not self.ordered_fields:
# Empty structs are not allowed in C standard.
# Therefore add a dummy field if an empty message occurs.
result += ' char dummy_field;'
result += '\n'.join([str(f) for f in self.ordered_fields])
result += '\n/* @@protoc_insertion_point(struct:%s) */' % self.name
result += '\n}'
if self.packed:
result += ' pb_packed'
result += ' %s;' % self.name
if self.packed:
result = 'PB_PACKED_STRUCT_START\n' + result
result += '\nPB_PACKED_STRUCT_END'
return result
def types(self):
return ''.join([f.types() for f in self.fields])
def get_initializer(self, null_init):
if not self.ordered_fields:
return '{0}'
parts = []
for field in self.ordered_fields:
parts.append(field.get_initializer(null_init))
return '{' + ', '.join(parts) + '}'
def default_decl(self, declaration_only = False):
result = ""
for field in self.fields:
default = field.default_decl(declaration_only)
if default is not None:
result += default + '\n'
return result
def count_required_fields(self):
'''Returns number of required fields inside this message'''
count = 0
for f in self.fields:
if not isinstance(f, OneOf):
if f.rules == 'REQUIRED':
count += 1
return count
def count_all_fields(self):
count = 0
for f in self.fields:
if isinstance(f, OneOf):
count += len(f.fields)
else:
count += 1
return count
def fields_declaration(self):
result = 'extern const pb_field_t %s_fields[%d];' % (self.name, self.count_all_fields() + 1)
return result
def fields_definition(self):
result = 'const pb_field_t %s_fields[%d] = {\n' % (self.name, self.count_all_fields() + 1)
prev = None
for field in self.ordered_fields:
result += field.pb_field_t(prev)
result += ',\n'
prev = field.get_last_field_name()
result += ' PB_LAST_FIELD\n};'
return result
def encoded_size(self, dependencies):
'''Return the maximum size that this message can take when encoded.
If the size cannot be determined, returns None.
'''
size = EncodedSize(0)
for field in self.fields:
fsize = field.encoded_size(dependencies)
if fsize is None:
return None
size += fsize
return size
# ---------------------------------------------------------------------------
# Processing of entire .proto files
# ---------------------------------------------------------------------------
def iterate_messages(desc, names = Names()):
'''Recursively find all messages. For each, yield name, DescriptorProto.'''
if hasattr(desc, 'message_type'):
submsgs = desc.message_type
else:
submsgs = desc.nested_type
for submsg in submsgs:
sub_names = names + submsg.name
yield sub_names, submsg
for x in iterate_messages(submsg, sub_names):
yield x
def iterate_extensions(desc, names = Names()):
'''Recursively find all extensions.
For each, yield name, FieldDescriptorProto.
'''
for extension in desc.extension:
yield names, extension
for subname, subdesc in iterate_messages(desc, names):
for extension in subdesc.extension:
yield subname, extension
try:
    reduce # Python 2 builtin; may also have been imported earlier in the file
except NameError:
    # Python 3 moved reduce() to functools; guarded import added as a safety net
    from functools import reduce
def toposort2(data):
'''Topological sort.
From http://code.activestate.com/recipes/577413-topological-sort/
This function is under the MIT license.
'''
for k, v in list(data.items()):
v.discard(k) # Ignore self dependencies
extra_items_in_deps = reduce(set.union, list(data.values()), set()) - set(data.keys())
data.update(dict([(item, set()) for item in extra_items_in_deps]))
while True:
ordered = set(item for item,dep in list(data.items()) if not dep)
if not ordered:
break
for item in sorted(ordered):
yield item
data = dict([(item, (dep - ordered)) for item,dep in list(data.items())
if item not in ordered])
assert not data, "A cyclic dependency exists amongst %r" % data
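# Minimal example: dependencies come out before their dependents, and items
# within the same tier come out in sorted order.
assert list(toposort2({'A': set(['B']), 'B': set()})) == ['B', 'A']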
def sort_dependencies(messages):
'''Sort a list of Messages based on dependencies.'''
dependencies = {}
message_by_name = {}
for message in messages:
dependencies[str(message.name)] = set(message.get_dependencies())
message_by_name[str(message.name)] = message
for msgname in toposort2(dependencies):
if msgname in message_by_name:
yield message_by_name[msgname]
def make_identifier(headername):
'''Make #ifndef identifier that contains uppercase A-Z and digits 0-9'''
result = ""
for c in headername.upper():
if c.isalnum():
result += c
else:
result += '_'
return result
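# Example (illustrative filename): non-alphanumerics become underscores.
assert make_identifier('my-proto.pb.h') == 'MY_PROTO_PB_H'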
class ProtoFile:
def __init__(self, fdesc, file_options):
'''Takes a FileDescriptorProto and parses it.'''
self.fdesc = fdesc
self.file_options = file_options
self.dependencies = {}
self.parse()
# Some of types used in this file probably come from the file itself.
# Thus it has implicit dependency on itself.
self.add_dependency(self)
def parse(self):
self.enums = []
self.messages = []
self.extensions = []
if self.fdesc.package:
base_name = Names(self.fdesc.package.split('.'))
else:
base_name = Names()
for enum in self.fdesc.enum_type:
enum_options = get_nanopb_suboptions(enum, self.file_options, base_name + enum.name)
self.enums.append(Enum(base_name, enum, enum_options))
for names, message in iterate_messages(self.fdesc, base_name):
message_options = get_nanopb_suboptions(message, self.file_options, names)
if message_options.skip_message:
continue
self.messages.append(Message(names, message, message_options))
for enum in message.enum_type:
enum_options = get_nanopb_suboptions(enum, message_options, names + enum.name)
self.enums.append(Enum(names, enum, enum_options))
for names, extension in iterate_extensions(self.fdesc, base_name):
field_options = get_nanopb_suboptions(extension, self.file_options, names + extension.name)
if field_options.type != nanopb_pb2.FT_IGNORE:
self.extensions.append(ExtensionField(names, extension, field_options))
def add_dependency(self, other):
for enum in other.enums:
self.dependencies[str(enum.names)] = enum
for msg in other.messages:
self.dependencies[str(msg.name)] = msg
# Fix field default values where enum short names are used.
for enum in other.enums:
if not enum.options.long_names:
for message in self.messages:
for field in message.fields:
if field.default in enum.value_longnames:
idx = enum.value_longnames.index(field.default)
field.default = enum.values[idx][0]
# Fix field data types where enums have negative values.
for enum in other.enums:
if not enum.has_negative():
for message in self.messages:
for field in message.fields:
if field.pbtype == 'ENUM' and field.ctype == enum.names:
field.pbtype = 'UENUM'
def generate_header(self, includes, headername, options):
'''Generate content for a header file.
Generates strings, which should be concatenated and stored to file.
'''
yield '/* Automatically generated nanopb header */\n'
if options.notimestamp:
yield '/* Generated by %s */\n\n' % (nanopb_version)
else:
yield '/* Generated by %s at %s. */\n\n' % (nanopb_version, time.asctime())
if self.fdesc.package:
symbol = make_identifier(self.fdesc.package + '_' + headername)
else:
symbol = make_identifier(headername)
yield '#ifndef PB_%s_INCLUDED\n' % symbol
yield '#define PB_%s_INCLUDED\n' % symbol
try:
yield options.libformat % ('pb.h')
except TypeError:
# no %s specified - use whatever was passed in as options.libformat
yield options.libformat
yield '\n'
for incfile in includes:
noext = os.path.splitext(incfile)[0]
yield options.genformat % (noext + options.extension + options.header_extension)
yield '\n'
yield '/* @@protoc_insertion_point(includes) */\n'
yield '#if PB_PROTO_HEADER_VERSION != 30\n'
yield '#error Regenerate this file with the current version of nanopb generator.\n'
yield '#endif\n'
yield '\n'
yield '#ifdef __cplusplus\n'
yield 'extern "C" {\n'
yield '#endif\n\n'
if self.enums:
yield '/* Enum definitions */\n'
for enum in self.enums:
yield str(enum) + '\n\n'
if self.messages:
yield '/* Struct definitions */\n'
for msg in sort_dependencies(self.messages):
yield msg.types()
yield str(msg) + '\n\n'
if self.extensions:
yield '/* Extensions */\n'
for extension in self.extensions:
yield extension.extension_decl()
yield '\n'
if self.messages:
yield '/* Default values for struct fields */\n'
for msg in self.messages:
yield msg.default_decl(True)
yield '\n'
yield '/* Initializer values for message structs */\n'
for msg in self.messages:
identifier = '%s_init_default' % msg.name
yield '#define %-40s %s\n' % (identifier, msg.get_initializer(False))
for msg in self.messages:
identifier = '%s_init_zero' % msg.name
yield '#define %-40s %s\n' % (identifier, msg.get_initializer(True))
yield '\n'
yield '/* Field tags (for use in manual encoding/decoding) */\n'
for msg in sort_dependencies(self.messages):
for field in msg.fields:
yield field.tags()
for extension in self.extensions:
yield extension.tags()
yield '\n'
yield '/* Struct field encoding specification for nanopb */\n'
for msg in self.messages:
yield msg.fields_declaration() + '\n'
yield '\n'
yield '/* Maximum encoded size of messages (where known) */\n'
for msg in self.messages:
msize = msg.encoded_size(self.dependencies)
identifier = '%s_size' % msg.name
if msize is not None:
yield '#define %-40s %s\n' % (identifier, msize)
else:
yield '/* %s depends on runtime parameters */\n' % identifier
yield '\n'
yield '/* Message IDs (where set with "msgid" option) */\n'
yield '#ifdef PB_MSGID\n'
for msg in self.messages:
if hasattr(msg,'msgid'):
yield '#define PB_MSG_%d %s\n' % (msg.msgid, msg.name)
yield '\n'
symbol = make_identifier(headername.split('.')[0])
yield '#define %s_MESSAGES \\\n' % symbol
for msg in self.messages:
m = "-1"
msize = msg.encoded_size(self.dependencies)
if msize is not None:
m = msize
if hasattr(msg,'msgid'):
yield '\tPB_MSG(%d,%s,%s) \\\n' % (msg.msgid, m, msg.name)
yield '\n'
for msg in self.messages:
if hasattr(msg,'msgid'):
yield '#define %s_msgid %d\n' % (msg.name, msg.msgid)
yield '\n'
yield '#endif\n\n'
yield '#ifdef __cplusplus\n'
yield '} /* extern "C" */\n'
yield '#endif\n'
# End of header
yield '/* @@protoc_insertion_point(eof) */\n'
yield '\n#endif\n'
def generate_source(self, headername, options):
'''Generate content for a source file.'''
yield '/* Automatically generated nanopb constant definitions */\n'
if options.notimestamp:
yield '/* Generated by %s */\n\n' % (nanopb_version)
else:
yield '/* Generated by %s at %s. */\n\n' % (nanopb_version, time.asctime())
yield options.genformat % (headername)
yield '\n'
yield '/* @@protoc_insertion_point(includes) */\n'
yield '#if PB_PROTO_HEADER_VERSION != 30\n'
yield '#error Regenerate this file with the current version of nanopb generator.\n'
yield '#endif\n'
yield '\n'
for msg in self.messages:
yield msg.default_decl(False)
yield '\n\n'
for msg in self.messages:
yield msg.fields_definition() + '\n\n'
for ext in self.extensions:
yield ext.extension_def() + '\n'
for enum in self.enums:
yield enum.enum_to_string_definition() + '\n'
# Add checks for numeric limits
if self.messages:
largest_msg = max(self.messages, key = lambda m: m.count_required_fields())
largest_count = largest_msg.count_required_fields()
if largest_count > 64:
yield '\n/* Check that missing required fields will be properly detected */\n'
yield '#if PB_MAX_REQUIRED_FIELDS < %d\n' % largest_count
yield '#error Properly detecting missing required fields in %s requires \\\n' % largest_msg.name
yield ' setting PB_MAX_REQUIRED_FIELDS to %d or more.\n' % largest_count
yield '#endif\n'
max_field = FieldMaxSize()
checks_msgnames = []
for msg in self.messages:
checks_msgnames.append(msg.name)
for field in msg.fields:
max_field.extend(field.largest_field_value())
worst = max_field.worst
worst_field = max_field.worst_field
checks = max_field.checks
if worst > 255 or checks:
yield '\n/* Check that field information fits in pb_field_t */\n'
if worst > 65535 or checks:
yield '#if !defined(PB_FIELD_32BIT)\n'
if worst > 65535:
yield '#error Field descriptor for %s is too large. Define PB_FIELD_32BIT to fix this.\n' % worst_field
else:
assertion = ' && '.join(str(c) + ' < 65536' for c in checks)
msgs = '_'.join(str(n) for n in checks_msgnames)
yield '/* If you get an error here, it means that you need to define PB_FIELD_32BIT\n'
yield ' * compile-time option. You can do that in pb.h or on compiler command line.\n'
yield ' * \n'
yield ' * The reason you need to do this is that some of your messages contain tag\n'
yield ' * numbers or field sizes that are larger than what can fit in 8 or 16 bit\n'
yield ' * field descriptors.\n'
yield ' */\n'
yield 'PB_STATIC_ASSERT((%s), YOU_MUST_DEFINE_PB_FIELD_32BIT_FOR_MESSAGES_%s)\n'%(assertion,msgs)
yield '#endif\n\n'
if worst < 65536:
yield '#if !defined(PB_FIELD_16BIT) && !defined(PB_FIELD_32BIT)\n'
if worst > 255:
yield '#error Field descriptor for %s is too large. Define PB_FIELD_16BIT to fix this.\n' % worst_field
else:
assertion = ' && '.join(str(c) + ' < 256' for c in checks)
msgs = '_'.join(str(n) for n in checks_msgnames)
yield '/* If you get an error here, it means that you need to define PB_FIELD_16BIT\n'
yield ' * compile-time option. You can do that in pb.h or on compiler command line.\n'
yield ' * \n'
yield ' * The reason you need to do this is that some of your messages contain tag\n'
yield ' * numbers or field sizes that are larger than what can fit in the default\n'
yield ' * 8 bit descriptors.\n'
yield ' */\n'
yield 'PB_STATIC_ASSERT((%s), YOU_MUST_DEFINE_PB_FIELD_16BIT_FOR_MESSAGES_%s)\n'%(assertion,msgs)
yield '#endif\n\n'
# Add check for sizeof(double)
has_double = False
for msg in self.messages:
for field in msg.fields:
if field.ctype == 'double':
has_double = True
if has_double:
yield '\n'
yield '/* On some platforms (such as AVR), double is really float.\n'
yield ' * These are not directly supported by nanopb, but see example_avr_double.\n'
yield ' * To get rid of this error, remove any double fields from your .proto.\n'
yield ' */\n'
yield 'PB_STATIC_ASSERT(sizeof(double) == 8, DOUBLE_MUST_BE_8_BYTES)\n'
yield '\n'
yield '/* @@protoc_insertion_point(eof) */\n'
# ---------------------------------------------------------------------------
# Options parsing for the .proto files
# ---------------------------------------------------------------------------
from fnmatch import fnmatch
def read_options_file(infile):
'''Parse a separate options file to list:
[(namemask, options), ...]
'''
results = []
data = infile.read()
    data = re.sub(r'/\*.*?\*/', '', data, flags = re.MULTILINE)
    data = re.sub(r'//.*?$', '', data, flags = re.MULTILINE)
    data = re.sub(r'#.*?$', '', data, flags = re.MULTILINE)
for i, line in enumerate(data.split('\n')):
line = line.strip()
if not line:
continue
parts = line.split(None, 1)
if len(parts) < 2:
sys.stderr.write("%s:%d: " % (infile.name, i + 1) +
"Option lines should have space between field name and options. " +
"Skipping line: '%s'\n" % line)
continue
opts = nanopb_pb2.NanoPBOptions()
try:
text_format.Merge(parts[1], opts)
except Exception as e:
sys.stderr.write("%s:%d: " % (infile.name, i + 1) +
"Unparseable option line: '%s'. " % line +
"Error: %s\n" % str(e))
continue
results.append((parts[0], opts))
return results
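# Example .options line (hypothetical file contents):
#   MyMessage.myfield max_size:40
# parses to [('MyMessage.myfield', <NanoPBOptions with max_size = 40>)].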
class Globals:
'''Ugly global variables, should find a good way to pass these.'''
verbose_options = False
separate_options = []
matched_namemasks = set()
def get_nanopb_suboptions(subdesc, options, name):
'''Get copy of options, and merge information from subdesc.'''
new_options = nanopb_pb2.NanoPBOptions()
new_options.CopyFrom(options)
if hasattr(subdesc, 'syntax') and subdesc.syntax == "proto3":
new_options.proto3 = True
# Handle options defined in a separate file
dotname = '.'.join(name.parts)
for namemask, options in Globals.separate_options:
if fnmatch(dotname, namemask):
Globals.matched_namemasks.add(namemask)
new_options.MergeFrom(options)
# Handle options defined in .proto
if isinstance(subdesc.options, descriptor.FieldOptions):
ext_type = nanopb_pb2.nanopb
elif isinstance(subdesc.options, descriptor.FileOptions):
ext_type = nanopb_pb2.nanopb_fileopt
elif isinstance(subdesc.options, descriptor.MessageOptions):
ext_type = nanopb_pb2.nanopb_msgopt
elif isinstance(subdesc.options, descriptor.EnumOptions):
ext_type = nanopb_pb2.nanopb_enumopt
else:
raise Exception("Unknown options type")
if subdesc.options.HasExtension(ext_type):
ext = subdesc.options.Extensions[ext_type]
new_options.MergeFrom(ext)
if Globals.verbose_options:
sys.stderr.write("Options for " + dotname + ": ")
sys.stderr.write(text_format.MessageToString(new_options) + "\n")
return new_options
# ---------------------------------------------------------------------------
# Command line interface
# ---------------------------------------------------------------------------
import sys
import os.path
from optparse import OptionParser
optparser = OptionParser(
usage = "Usage: nanopb_generator.py [options] file.pb ...",
epilog = "Compile file.pb from file.proto by: 'protoc -ofile.pb file.proto'. " +
"Output will be written to file.pb.h and file.pb.c.")
optparser.add_option("-x", dest="exclude", metavar="FILE", action="append", default=[],
help="Exclude file from generated #include list.")
optparser.add_option("-e", "--extension", dest="extension", metavar="EXTENSION", default=".pb",
help="Set extension to use instead of '.pb' for generated files. [default: %default]")
optparser.add_option("-H", "--header-extension", dest="header_extension", metavar="EXTENSION", default=".h",
help="Set extension to use for generated header files. [default: %default]")
optparser.add_option("-S", "--source-extension", dest="source_extension", metavar="EXTENSION", default=".c",
help="Set extension to use for generated source files. [default: %default]")
optparser.add_option("-f", "--options-file", dest="options_file", metavar="FILE", default="%s.options",
help="Set name of a separate generator options file.")
optparser.add_option("-I", "--options-path", dest="options_path", metavar="DIR",
action="append", default = [],
help="Search for .options files additionally in this path")
optparser.add_option("-D", "--output-dir", dest="output_dir",
metavar="OUTPUTDIR", default=None,
help="Output directory of .pb.h and .pb.c files")
optparser.add_option("-Q", "--generated-include-format", dest="genformat",
metavar="FORMAT", default='#include "%s"\n',
help="Set format string to use for including other .pb.h files. [default: %default]")
optparser.add_option("-L", "--library-include-format", dest="libformat",
metavar="FORMAT", default='#include <%s>\n',
help="Set format string to use for including the nanopb pb.h header. [default: %default]")
optparser.add_option("-T", "--no-timestamp", dest="notimestamp", action="store_true", default=False,
help="Don't add timestamp to .pb.h and .pb.c preambles")
optparser.add_option("-q", "--quiet", dest="quiet", action="store_true", default=False,
help="Don't print anything except errors.")
optparser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False,
help="Print more information.")
optparser.add_option("-s", dest="settings", metavar="OPTION:VALUE", action="append", default=[],
help="Set generator option (max_size, max_count etc.).")
def parse_file(filename, fdesc, options):
'''Parse a single file. Returns a ProtoFile instance.'''
toplevel_options = nanopb_pb2.NanoPBOptions()
for s in options.settings:
text_format.Merge(s, toplevel_options)
if not fdesc:
        with open(filename, 'rb') as f:
            data = f.read()
        fdesc = descriptor.FileDescriptorSet.FromString(data).file[0]
# Check if there is a separate .options file
had_abspath = False
try:
optfilename = options.options_file % os.path.splitext(filename)[0]
except TypeError:
# No %s specified, use the filename as-is
optfilename = options.options_file
had_abspath = True
paths = ['.'] + options.options_path
for p in paths:
if os.path.isfile(os.path.join(p, optfilename)):
optfilename = os.path.join(p, optfilename)
if options.verbose:
sys.stderr.write('Reading options from ' + optfilename + '\n')
Globals.separate_options = read_options_file(open(optfilename, "rU"))
break
else:
# If we are given a full filename and it does not exist, give an error.
# However, don't give error when we automatically look for .options file
# with the same name as .proto.
if options.verbose or had_abspath:
sys.stderr.write('Options file not found: ' + optfilename + '\n')
Globals.separate_options = []
Globals.matched_namemasks = set()
# Parse the file
file_options = get_nanopb_suboptions(fdesc, toplevel_options, Names([filename]))
f = ProtoFile(fdesc, file_options)
f.optfilename = optfilename
return f
def process_file(filename, fdesc, options, other_files = {}):
'''Process a single file.
filename: The full path to the .proto or .pb source file, as string.
fdesc: The loaded FileDescriptorSet, or None to read from the input file.
options: Command line options as they come from OptionsParser.
Returns a dict:
{'headername': Name of header file,
'headerdata': Data for the .h header file,
'sourcename': Name of the source code file,
'sourcedata': Data for the .c source code file
}
'''
f = parse_file(filename, fdesc, options)
# Provide dependencies if available
for dep in f.fdesc.dependency:
if dep in other_files:
f.add_dependency(other_files[dep])
# Decide the file names
noext = os.path.splitext(filename)[0]
headername = noext + options.extension + options.header_extension
sourcename = noext + options.extension + options.source_extension
headerbasename = os.path.basename(headername)
# List of .proto files that should not be included in the C header file
# even if they are mentioned in the source .proto.
excludes = ['nanopb.proto', 'google/protobuf/descriptor.proto'] + options.exclude
includes = [d for d in f.fdesc.dependency if d not in excludes]
headerdata = ''.join(f.generate_header(includes, headerbasename, options))
sourcedata = ''.join(f.generate_source(headerbasename, options))
# Check if there were any lines in .options that did not match a member
unmatched = [n for n,o in Globals.separate_options if n not in Globals.matched_namemasks]
if unmatched and not options.quiet:
sys.stderr.write("Following patterns in " + f.optfilename + " did not match any fields: "
+ ', '.join(unmatched) + "\n")
if not Globals.verbose_options:
sys.stderr.write("Use protoc --nanopb-out=-v:. to see a list of the field names.\n")
return {'headername': headername, 'headerdata': headerdata,
'sourcename': sourcename, 'sourcedata': sourcedata}
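# Typical standalone flow (per the usage/epilog strings above, assuming a
# message.proto compiled beforehand with protoc):
#   protoc -omessage.pb message.proto
#   python nanopb_generator.py message.pb   # -> message.pb.h / message.pb.c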
def main_cli():
'''Main function when invoked directly from the command line.'''
options, filenames = optparser.parse_args()
if not filenames:
optparser.print_help()
sys.exit(1)
if options.quiet:
options.verbose = False
if options.output_dir and not os.path.exists(options.output_dir):
optparser.print_help()
sys.stderr.write("\noutput_dir does not exist: %s\n" % options.output_dir)
sys.exit(1)
Globals.verbose_options = options.verbose
for filename in filenames:
results = process_file(filename, None, options)
base_dir = options.output_dir or ''
to_write = [
(os.path.join(base_dir, results['headername']), results['headerdata']),
(os.path.join(base_dir, results['sourcename']), results['sourcedata']),
]
if not options.quiet:
paths = " and ".join([x[0] for x in to_write])
sys.stderr.write("Writing to %s\n" % paths)
for path, data in to_write:
with open(path, 'w') as f:
f.write(data)
def main_plugin():
'''Main function when invoked as a protoc plugin.'''
import io, sys
if sys.platform == "win32":
import os, msvcrt
# Set stdin and stdout to binary mode
msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
data = io.open(sys.stdin.fileno(), "rb").read()
request = plugin_pb2.CodeGeneratorRequest.FromString(data)
try:
# Versions of Python prior to 2.7.3 do not support unicode
# input to shlex.split(). Try to convert to str if possible.
params = str(request.parameter)
except UnicodeEncodeError:
params = request.parameter
import shlex
args = shlex.split(params)
options, dummy = optparser.parse_args(args)
Globals.verbose_options = options.verbose
response = plugin_pb2.CodeGeneratorResponse()
    # Google's protoc does not currently indicate the full path of proto files.
    # Instead, always add the main file's path to the search dirs; that works
    # for the common case.
import os.path
options.options_path.append(os.path.dirname(request.file_to_generate[0]))
# Process any include files first, in order to have them
# available as dependencies
other_files = {}
for fdesc in request.proto_file:
other_files[fdesc.name] = parse_file(fdesc.name, fdesc, options)
for filename in request.file_to_generate:
for fdesc in request.proto_file:
if fdesc.name == filename:
results = process_file(filename, fdesc, options, other_files)
f = response.file.add()
f.name = results['headername']
f.content = results['headerdata']
f = response.file.add()
f.name = results['sourcename']
f.content = results['sourcedata']
io.open(sys.stdout.fileno(), "wb").write(response.SerializeToString())
if __name__ == '__main__':
# Check if we are running as a plugin under protoc
if 'protoc-gen-' in sys.argv[0] or '--protoc-plugin' in sys.argv:
main_plugin()
else:
main_cli()
|
winzard/django-seo | refs/heads/master | regressiontests/userapp/models.py | 11 | from django.db import models
class Page(models.Model):
title = models.CharField(max_length=255, default="", blank=True)
type = models.CharField(max_length=50, default="", blank=True)
content = models.TextField(default="", blank=True)
@models.permalink
def get_absolute_url(self):
return ('userapp_page_detail', [self.type], {})
def __unicode__(self):
return self.title or self.content
class Product(models.Model):
meta_description = models.TextField(default="")
meta_keywords = models.CharField(max_length=255, default="")
meta_title = models.CharField(max_length=255, default="")
@models.permalink
def get_absolute_url(self):
return ('userapp_product_detail', [self.id], {})
def __unicode__(self):
return self.meta_title
class Category(models.Model):
name = models.CharField(max_length=255, default="M Category Name")
page_title = models.CharField(max_length=255, default="M Category Page Title")
@models.permalink
def get_absolute_url(self):
return ('userapp_my_view', ["abc"], {})
class NoPath(models.Model):
pass
class Tag(models.Model):
name = models.CharField(max_length=255, default="")
@models.permalink
def get_absolute_url(self):
return ('userapp_tag', [self.name], {})
def __unicode__(self):
return self.name
|
guorendong/iridium-browser-ubuntu | refs/heads/ubuntu/precise | tools/grit/grit/tool/toolbar_postprocess.py | 61 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
''' Toolbar postprocessing class. Modifies the previously processed GRD tree
by creating separate message groups for each of the IDS_COMMAND macros.
Also adds some identifiers nodes to declare specific ids to be included
in the generated grh file.
'''
import postprocess_interface
from grit import lazy_re
import grit.node.empty
from grit.node import misc
class ToolbarPostProcessor(postprocess_interface.PostProcessor):
''' Defines message groups within the grd file for each of the
IDS_COMMAND stuff.
'''
_IDS_COMMAND = lazy_re.compile(r'IDS_COMMAND_')
_GRAB_PARAMETERS = lazy_re.compile(
      r'(IDS_COMMAND_[a-zA-Z0-9]+)_([a-zA-Z0-9]+)')
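  # Illustrative match (assumed name shape): 'IDS_COMMAND_ABOUT_TIP' yields
  # group(1) == 'IDS_COMMAND_ABOUT' and group(2) == 'TIP'.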
def Process(self, rctext, rcpath, grdnode):
''' Processes the data in rctext and grdnode.
Args:
rctext: string containing the contents of the RC file being processed.
rcpath: the path used to access the file.
grdnode: the root node of the grd xml data generated by
the rc2grd tool.
Return:
The root node of the processed GRD tree.
'''
release = grdnode.children[2]
messages = release.children[2]
identifiers = grit.node.empty.IdentifiersNode()
identifiers.StartParsing('identifiers', release)
identifiers.EndParsing()
release.AddChild(identifiers)
#
# Turn the IDS_COMMAND messages into separate message groups
    # with ids that are offset from the message group's first id
#
previous_name_attr = ''
previous_prefix = ''
previous_node = ''
new_messages_node = self.ConstructNewMessages(release)
for node in messages.children[:]:
name_attr = node.attrs['name']
if self._IDS_COMMAND.search(name_attr):
mo = self._GRAB_PARAMETERS.search(name_attr)
mp = self._GRAB_PARAMETERS.search(previous_name_attr)
if mo and mp:
prefix = mo.group(1)
previous_prefix = mp.group(1)
new_message_id = mp.group(2)
if prefix == previous_prefix:
messages.RemoveChild(previous_name_attr)
previous_node.attrs['offset'] = 'PCI_' + new_message_id
del previous_node.attrs['name']
new_messages_node.AddChild(previous_node)
else:
messages.RemoveChild(previous_name_attr)
previous_node.attrs['offset'] = 'PCI_' + new_message_id
del previous_node.attrs['name']
new_messages_node.AddChild(previous_node)
new_messages_node.attrs['first_id'] = previous_prefix
new_messages_node = self.ConstructNewMessages(release)
else:
if self._IDS_COMMAND.search(previous_name_attr):
messages.RemoveChild(previous_name_attr)
previous_prefix = mp.group(1)
new_message_id = mp.group(2)
previous_node.attrs['offset'] = 'PCI_' + new_message_id
del previous_node.attrs['name']
new_messages_node.AddChild(previous_node)
new_messages_node.attrs['first_id'] = previous_prefix
new_messages_node = self.ConstructNewMessages(release)
else:
if self._IDS_COMMAND.search(previous_name_attr):
messages.RemoveChild(previous_name_attr)
mp = self._GRAB_PARAMETERS.search(previous_name_attr)
previous_prefix = mp.group(1)
new_message_id = mp.group(2)
previous_node.attrs['offset'] = 'PCI_' + new_message_id
del previous_node.attrs['name']
new_messages_node.AddChild(previous_node)
new_messages_node.attrs['first_id'] = previous_prefix
new_messages_node = self.ConstructNewMessages(release)
previous_name_attr = name_attr
previous_node = node
self.AddIdentifiers(rctext, identifiers)
return grdnode
def ConstructNewMessages(self, parent):
new_node = grit.node.empty.MessagesNode()
new_node.StartParsing('messages', parent)
new_node.EndParsing()
parent.AddChild(new_node)
return new_node
def AddIdentifiers(self, rctext, node):
node.AddChild(misc.IdentifierNode.Construct(node, 'IDS_COMMAND_gcFirst', '12000', ''))
node.AddChild(misc.IdentifierNode.Construct(node,
'IDS_COMMAND_PCI_SPACE', '16', ''))
node.AddChild(misc.IdentifierNode.Construct(node, 'PCI_BUTTON', '0', ''))
node.AddChild(misc.IdentifierNode.Construct(node, 'PCI_MENU', '1', ''))
node.AddChild(misc.IdentifierNode.Construct(node, 'PCI_TIP', '2', ''))
node.AddChild(misc.IdentifierNode.Construct(node, 'PCI_OPTIONS_TEXT', '3', ''))
node.AddChild(misc.IdentifierNode.Construct(node, 'PCI_TIP_DISABLED', '4', ''))
node.AddChild(misc.IdentifierNode.Construct(node, 'PCI_TIP_MENU', '5', ''))
node.AddChild(misc.IdentifierNode.Construct(node, 'PCI_TIP_MENU_DISABLED', '6', ''))
node.AddChild(misc.IdentifierNode.Construct(node, 'PCI_TIP_OPTIONS', '7', ''))
node.AddChild(misc.IdentifierNode.Construct(node, 'PCI_TIP_OPTIONS_DISABLED', '8', ''))
node.AddChild(misc.IdentifierNode.Construct(node,
'PCI_TIP_DISABLED_BY_POLICY', '9', ''))
|
apache/libcloud | refs/heads/trunk | integration/compute/api/util.py | 2 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from bottle import request
from functools import wraps
from integration.compute.config import EXPECTED_AUTH
def secure(f):
@wraps(f)
def secure_route(*args, **kwargs):
if 'Authorization' not in request.headers:
raise Exception('Argghhhh')
else:
auth = request.headers['Authorization']
if auth != EXPECTED_AUTH:
raise Exception('Bad authentication')
return f(*args, **kwargs)
return secure_route
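# Usage sketch (hypothetical bottle route, not part of this module):
#
# @route('/compute/servers')
# @secure
# def list_servers():
#     ...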
|
ideamark/breadAI | refs/heads/master | breadbot/func/check_data.py | 1 | #!/usr/bin/env python3
import os
import sys
import yaml
from breadbot.core import common
LOG = common.ConsoleLog()
class CheckData(object):
def __init__(self):
pass
def _error(self, err, msg):
LOG.error('[Error] %s' % msg)
raise Exception(err)
def do_check(self, data_path_list=[]):
try:
if not data_path_list:
data_path_list = common.Cfg().get('local', 'data_paths')
data_path_list = common.expand_path(data_path_list)
data_path_list = common.get_yml_path_list(data_path_list)
for data_path in data_path_list:
print('')
LOG.info('Checking %s' % data_path)
with open(data_path, 'r') as f:
                    qas = yaml.safe_load(f.read())  # safe_load avoids executing arbitrary YAML tags
for qa in qas:
if not qa:
self._error(qa, 'qa is none')
for value in qa.values():
if not value:
self._error(qa, 'value is none')
else:
for item in value:
if type(item) is dict:
self._error(qa, 'item is dict')
if not item:
self._error(qa, 'item is none')
LOG.info('Check Passed!')
except Exception as e:
LOG.error(e)
|
cloudify-cosmo/packman | refs/heads/master | packman/definitions.py | 1 | # flake8: NOQA
# packman base definitions
PACKAGER_TEMPLATE_PATH = "package-templates/" # directory which contains config for all modules
# packman base params
PARAM_NAME = 'name' # a mandatory 'string' representing the package's name
PARAM_VERSION = 'version' # a mandatory 'string' representing the package's version
PARAM_DEPENDS = 'depends' # an optional [list] of the dependencies of the package
PARAM_REQUIRES = 'requires' # an optional [list] of the requirements to download
PARAM_PACKAGE_PATH = 'package_path' # a mandatory 'string' representing the destination path to be used in the packaging process
PARAM_SOURCES_PATH = 'sources_path' # a mandatory 'string' representing the source path to which files will be downloaded
PARAM_SOURCE_PACKAGE_TYPE = 'source_package_type' # an optional 'string' representing the source type of the package (as supported by fpm)
PARAM_DESTINATION_PACKAGE_TYPES = 'destination_package_types' # a mandatory [list] representing the destination types of the packages you'd like to create (as supported by fpm)
PARAM_BOOTSTRAP_SCRIPT_PATH = 'bootstrap_script' # an optional 'string' representing a path to which the bootstrap script (generated from the template) will be written
PARAM_BOOTSTRAP_TEMPLATE_PATH = 'bootstrap_template' # an optional 'string' representing a bootstrap script path to be appended to a deb or rpm (appended)
PARAM_OVERWRITE_OUTPUT = 'overwrite_package' # an optional bool representing whether to overwrite a destination package by default
PARAM_OVERWRITE_SOURCES = 'overwrite_sources' # an optional bool representing whether to overwrite sources when retrieving package sources
PARAM_CONFIG_TEMPLATE_CONFIG = 'config_templates' # an optional {dict} of config files and templates
PARAM_PYTHON_MODULES = 'python_modules' # an optional [list] of python modules to install into a virtualenv
PARAM_RUBY_GEMS = 'ruby_gems' # an optional [list] of ruby gems to download
PARAM_VIRTUALENV = 'virtualenv' # an optional {dict} containing a venv path and a [list] of modules to install
PARAM_NODE_PACKAGES = 'node_packages' # an optional [list] of node packages to download
PARAM_SOURCE_URLS = 'source_urls' # an optional 'string' representing the sources to download # TODO: REPLACE WITH [LIST]!
PARAM_SOURCE_REPOS = 'source_repos' # an optional 'string' representing repos to add to the repos list
PARAM_SOURCE_PPAS = 'source_ppas' # an optional 'string' representing a ppa repository to add
PARAM_SOURCE_KEYS = 'source_keys' # an optional 'string' representing a key to download
PARAM_REQS = 'requires' # an optional [list] of requirements to download from the local distributions repos
PARAM_PREREQS = 'prereqs' # an optional [list] of prerequisites to install before retrieving the sources or packaging
PARAM_KEEP_SOURCES = 'keep_sources' # an optional 'bool' representing whether to keep the retrieved sources after packaging
# packman configuration files generation params
PARAM_CONFIG_TEMPLATE_DIR = 'template_dir' # an optional 'dict' containing config for generating config files from a templates directory
# if PARAM_CONFIG_TEMPLATE_DIR is used:
PARAM_CONFIG_TEMPALTES_DIR_TEMPLATES_PATH = 'templates' # a mandatory 'string' stating where the template files reside
PARAM_CONFIG_TEMPALTES_DIR_CONFIG_DIR = 'config_dir' # a mandatory 'string' stating where in the package dir the processed files should reside.
PARAM_CONFIG_TEMPLATE_FILE = 'template_file' # an optional 'dict' containing config for generating a config file from a template file
# if PARAM_CONFIG_TEMPLATE_FILE is used:
PARAM_CONFIG_TEMPALTES_FILE_TEMPLATE_FILE = 'template' # a mandatory 'string' stating where the specific template file resides
PARAM_CONFIG_TEMPALTES_FILE_OUTPUT_FILE = 'output_file' # a mandatory 'string' stating the name of the output config file.
PARAM_CONFIG_TEMPALTES_FILE_CONFIG_DIR = 'config_dir' # a mandatory 'string' stating where in the package dir the processed file should reside.
PARAM_CONFIG_CONFIG_DIR = 'config_dir' # an optional 'dict' containing config for copying config files from a config files directory
# if PARAM_CONFIG_CONFIG_DIR is used:
PARAM_CONFIG_FILES_CONFIGS_PATH = 'files' # a mandatory 'string' stating where the origin config files reside.
PARAM_CONFIG_FILES_CONFIGS_DIR = 'config_dir' # a mandatory 'string' stating where in the package dir the processed file should reside.
PARAM_CONFIG_CONFIG_FILE = 'config_file' # an optional 'dict' containing config for copying a config file from a directory. (NOT IMPLEMENTED)
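# Illustrative package template using the params above (YAML; every key
# and value here is hypothetical):
#
#   name: mypackage
#   version: '1.0.0'
#   source_urls: http://example.com/mypackage.tar.gz
#   sources_path: /tmp/mypackage/sources
#   package_path: /tmp/mypackage/package
#   destination_package_types: [deb, rpm]
#   config_templates:
#       template_file:
#           template: package-templates/mypackage.conf.template
#           output_file: mypackage.conf
#           config_dir: etc/mypackage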
|
cosmicBboy/themis-ml | refs/heads/master | tests/test_counterfactually_fair_models.py | 1 | """Unit tests for counterfactually fair models."""
import numpy as np
import pytest
from themis_ml.linear_model import counterfactually_fair_models
from themis_ml.checks import is_binary, is_continuous
from conftest import create_random_X, create_y, create_s
# random_X_data fixture is in conftest.py
def test_get_binary_X_index(random_X_data):
X = create_random_X(random_X_data)
expected = np.concatenate([
random_X_data["bin"], random_X_data["bin"]], axis=1)
binary_index = counterfactually_fair_models._get_binary_X_index(X)
assert (X[:, binary_index] == expected).all()
def test_get_continuous_X(random_X_data):
X = create_random_X(random_X_data)
expected = np.concatenate([
random_X_data["cont"], random_X_data["cont"]], axis=1)
continuous_index = counterfactually_fair_models._get_continuous_X_index(X)
assert (X[:, continuous_index] == expected).all()
def test_fit_predict(random_X_data):
"""Test happy path of LinearACFClassifier `fit` and `predict` methods."""
X = create_random_X(random_X_data)
y = create_y()
s = create_s()
for residual_type in ["pearson", "deviance", "absolute"]:
lin_acf = counterfactually_fair_models.LinearACFClassifier(
binary_residual_type=residual_type)
lin_acf.fit(X, y, s)
lin_acf_pred_proba = lin_acf.predict_proba(X, s)[:, 1]
assert(lin_acf.fit_residuals_ ==
lin_acf._compute_residuals_on_predict(X, s)).all()
assert is_binary(lin_acf.predict(X, s))
assert is_continuous(lin_acf_pred_proba)
assert max(lin_acf_pred_proba) < 1
assert min(lin_acf_pred_proba) > 0
def test_binary_single_class(random_X_data):
"""Linear ACF can handle training data with single-valued column."""
X = create_random_X(random_X_data)
X = np.concatenate([
X, np.ones((X.shape[0], 1))
], axis=1)
s = create_s()
y = create_y()
for residual_type in ["pearson", "deviance", "absolute"]:
lin_acf = counterfactually_fair_models.LinearACFClassifier(
binary_residual_type=residual_type)
lin_acf.fit(X, y, s)
lin_acf_pred_proba = lin_acf.predict_proba(X, s)[:, 1]
assert(lin_acf.fit_residuals_ ==
lin_acf._compute_residuals_on_predict(X, s)).all()
assert is_binary(lin_acf.predict(X, s))
assert is_continuous(lin_acf_pred_proba)
assert max(lin_acf_pred_proba) < 1
assert min(lin_acf_pred_proba) > 0
def test_predict_value_error(random_X_data):
"""Raise ValueError if X doesn't have expected number of variables."""
X = create_random_X(random_X_data)
s = create_s()
lin_acf = counterfactually_fair_models.LinearACFClassifier()
lin_acf.fit(X, create_y(), s)
    with pytest.raises(ValueError):
        # pass in just a subset of the input variables
        lin_acf.predict(X[:, :5], s)
    with pytest.raises(ValueError):
        lin_acf.predict_proba(X[:, :5], s)
def test_invalid_binary_residual_type(random_X_data):
with pytest.raises(ValueError):
counterfactually_fair_models.LinearACFClassifier(
binary_residual_type="foobar")
|
smalls257/VRvisu | refs/heads/master | Library/External.LCA_RESTRICTED/Languages/CPython/27/Lib/rexec.py | 228 | """Restricted execution facilities.
The class RExec exports methods r_exec(), r_eval(), r_execfile(), and
r_import(), which correspond roughly to the built-in operations
exec, eval(), execfile() and import, but executing the code in an
environment that only exposes those built-in operations that are
deemed safe. To this end, a modest collection of 'fake' modules is
created which mimics the standard modules by the same names. It is a
policy decision which built-in modules and operations are made
available; this module provides a reasonable default, but derived
classes can change the policies e.g. by overriding or extending class
variables like ok_builtin_modules or methods like make_sys().
XXX To do:
- r_open should allow writing tmp dir
- r_exec etc. with explicit globals/locals? (Use rexec("exec ... in ...")?)
"""
from warnings import warnpy3k
warnpy3k("the rexec module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
import sys
import __builtin__
import os
import ihooks
import imp
__all__ = ["RExec"]
class FileBase:
ok_file_methods = ('fileno', 'flush', 'isatty', 'read', 'readline',
'readlines', 'seek', 'tell', 'write', 'writelines', 'xreadlines',
'__iter__')
class FileWrapper(FileBase):
# XXX This is just like a Bastion -- should use that!
def __init__(self, f):
for m in self.ok_file_methods:
if not hasattr(self, m) and hasattr(f, m):
setattr(self, m, getattr(f, m))
def close(self):
self.flush()
TEMPLATE = """
def %s(self, *args):
return getattr(self.mod, self.name).%s(*args)
"""
class FileDelegate(FileBase):
def __init__(self, mod, name):
self.mod = mod
self.name = name
for m in FileBase.ok_file_methods + ('close',):
exec TEMPLATE % (m, m)
class RHooks(ihooks.Hooks):
def __init__(self, *args):
# Hacks to support both old and new interfaces:
# old interface was RHooks(rexec[, verbose])
# new interface is RHooks([verbose])
verbose = 0
rexec = None
if args and type(args[-1]) == type(0):
verbose = args[-1]
args = args[:-1]
if args and hasattr(args[0], '__class__'):
rexec = args[0]
args = args[1:]
if args:
raise TypeError, "too many arguments"
ihooks.Hooks.__init__(self, verbose)
self.rexec = rexec
def set_rexec(self, rexec):
# Called by RExec instance to complete initialization
self.rexec = rexec
def get_suffixes(self):
return self.rexec.get_suffixes()
def is_builtin(self, name):
return self.rexec.is_builtin(name)
def init_builtin(self, name):
m = __import__(name)
return self.rexec.copy_except(m, ())
def init_frozen(self, name): raise SystemError, "don't use this"
def load_source(self, *args): raise SystemError, "don't use this"
def load_compiled(self, *args): raise SystemError, "don't use this"
def load_package(self, *args): raise SystemError, "don't use this"
def load_dynamic(self, name, filename, file):
return self.rexec.load_dynamic(name, filename, file)
def add_module(self, name):
return self.rexec.add_module(name)
def modules_dict(self):
return self.rexec.modules
def default_path(self):
return self.rexec.modules['sys'].path
# XXX Backwards compatibility
RModuleLoader = ihooks.FancyModuleLoader
RModuleImporter = ihooks.ModuleImporter
class RExec(ihooks._Verbose):
"""Basic restricted execution framework.
Code executed in this restricted environment will only have access to
modules and functions that are deemed safe; you can subclass RExec to
add or remove capabilities as desired.
The RExec class can prevent code from performing unsafe operations like
reading or writing disk files, or using TCP/IP sockets. However, it does
not protect against code using extremely large amounts of memory or
processor time.
"""
ok_path = tuple(sys.path) # That's a policy decision
ok_builtin_modules = ('audioop', 'array', 'binascii',
'cmath', 'errno', 'imageop',
'marshal', 'math', 'md5', 'operator',
'parser', 'select',
'sha', '_sre', 'strop', 'struct', 'time',
'_weakref')
ok_posix_names = ('error', 'fstat', 'listdir', 'lstat', 'readlink',
'stat', 'times', 'uname', 'getpid', 'getppid',
'getcwd', 'getuid', 'getgid', 'geteuid', 'getegid')
ok_sys_names = ('byteorder', 'copyright', 'exit', 'getdefaultencoding',
'getrefcount', 'hexversion', 'maxint', 'maxunicode',
'platform', 'ps1', 'ps2', 'version', 'version_info')
nok_builtin_names = ('open', 'file', 'reload', '__import__')
ok_file_types = (imp.C_EXTENSION, imp.PY_SOURCE)
def __init__(self, hooks = None, verbose = 0):
"""Returns an instance of the RExec class.
The hooks parameter is an instance of the RHooks class or a subclass
of it. If it is omitted or None, the default RHooks class is
instantiated.
Whenever the RExec module searches for a module (even a built-in one)
or reads a module's code, it doesn't actually go out to the file
system itself. Rather, it calls methods of an RHooks instance that
was passed to or created by its constructor. (Actually, the RExec
object doesn't make these calls --- they are made by a module loader
object that's part of the RExec object. This allows another level of
flexibility, which can be useful when changing the mechanics of
import within the restricted environment.)
By providing an alternate RHooks object, we can control the file
system accesses made to import a module, without changing the
actual algorithm that controls the order in which those accesses are
made. For instance, we could substitute an RHooks object that
passes all filesystem requests to a file server elsewhere, via some
RPC mechanism such as ILU. Grail's applet loader uses this to support
importing applets from a URL for a directory.
If the verbose parameter is true, additional debugging output may be
sent to standard output.
"""
raise RuntimeError, "This code is not secure in Python 2.2 and later"
ihooks._Verbose.__init__(self, verbose)
# XXX There's a circular reference here:
self.hooks = hooks or RHooks(verbose)
self.hooks.set_rexec(self)
self.modules = {}
self.ok_dynamic_modules = self.ok_builtin_modules
list = []
for mname in self.ok_builtin_modules:
if mname in sys.builtin_module_names:
list.append(mname)
self.ok_builtin_modules = tuple(list)
self.set_trusted_path()
self.make_builtin()
self.make_initial_modules()
# make_sys must be last because it adds the already created
# modules to its builtin_module_names
self.make_sys()
self.loader = RModuleLoader(self.hooks, verbose)
self.importer = RModuleImporter(self.loader, verbose)
def set_trusted_path(self):
# Set the path from which dynamic modules may be loaded.
# Those dynamic modules must also occur in ok_builtin_modules
self.trusted_path = filter(os.path.isabs, sys.path)
def load_dynamic(self, name, filename, file):
if name not in self.ok_dynamic_modules:
raise ImportError, "untrusted dynamic module: %s" % name
if name in sys.modules:
src = sys.modules[name]
else:
src = imp.load_dynamic(name, filename, file)
dst = self.copy_except(src, [])
return dst
def make_initial_modules(self):
self.make_main()
self.make_osname()
# Helpers for RHooks
def get_suffixes(self):
return [item # (suff, mode, type)
for item in imp.get_suffixes()
if item[2] in self.ok_file_types]
def is_builtin(self, mname):
return mname in self.ok_builtin_modules
# The make_* methods create specific built-in modules
def make_builtin(self):
m = self.copy_except(__builtin__, self.nok_builtin_names)
m.__import__ = self.r_import
m.reload = self.r_reload
m.open = m.file = self.r_open
def make_main(self):
self.add_module('__main__')
def make_osname(self):
osname = os.name
src = __import__(osname)
dst = self.copy_only(src, self.ok_posix_names)
dst.environ = e = {}
for key, value in os.environ.items():
e[key] = value
def make_sys(self):
m = self.copy_only(sys, self.ok_sys_names)
m.modules = self.modules
m.argv = ['RESTRICTED']
m.path = map(None, self.ok_path)
m.exc_info = self.r_exc_info
m = self.modules['sys']
l = self.modules.keys() + list(self.ok_builtin_modules)
l.sort()
m.builtin_module_names = tuple(l)
# The copy_* methods copy existing modules with some changes
def copy_except(self, src, exceptions):
dst = self.copy_none(src)
for name in dir(src):
setattr(dst, name, getattr(src, name))
for name in exceptions:
try:
delattr(dst, name)
except AttributeError:
pass
return dst
def copy_only(self, src, names):
dst = self.copy_none(src)
for name in names:
try:
value = getattr(src, name)
except AttributeError:
continue
setattr(dst, name, value)
return dst
def copy_none(self, src):
m = self.add_module(src.__name__)
m.__doc__ = src.__doc__
return m
# Add a module -- return an existing module or create one
def add_module(self, mname):
m = self.modules.get(mname)
if m is None:
self.modules[mname] = m = self.hooks.new_module(mname)
m.__builtins__ = self.modules['__builtin__']
return m
# The r* methods are public interfaces
def r_exec(self, code):
"""Execute code within a restricted environment.
The code parameter must either be a string containing one or more
lines of Python code, or a compiled code object, which will be
executed in the restricted environment's __main__ module.
"""
m = self.add_module('__main__')
exec code in m.__dict__
def r_eval(self, code):
"""Evaluate code within a restricted environment.
The code parameter must either be a string containing a Python
expression, or a compiled code object, which will be evaluated in
the restricted environment's __main__ module. The value of the
expression or code object will be returned.
"""
m = self.add_module('__main__')
return eval(code, m.__dict__)
def r_execfile(self, file):
"""Execute the Python code in the file in the restricted
environment's __main__ module.
"""
m = self.add_module('__main__')
execfile(file, m.__dict__)
def r_import(self, mname, globals={}, locals={}, fromlist=[]):
"""Import a module, raising an ImportError exception if the module
is considered unsafe.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
"""
return self.importer.import_module(mname, globals, locals, fromlist)
def r_reload(self, m):
"""Reload the module object, re-parsing and re-initializing it.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
"""
return self.importer.reload(m)
def r_unload(self, m):
"""Unload the module.
Removes it from the restricted environment's sys.modules dictionary.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
"""
return self.importer.unload(m)
# The s_* methods are similar but also swap std{in,out,err}
def make_delegate_files(self):
s = self.modules['sys']
self.delegate_stdin = FileDelegate(s, 'stdin')
self.delegate_stdout = FileDelegate(s, 'stdout')
self.delegate_stderr = FileDelegate(s, 'stderr')
self.restricted_stdin = FileWrapper(sys.stdin)
self.restricted_stdout = FileWrapper(sys.stdout)
self.restricted_stderr = FileWrapper(sys.stderr)
def set_files(self):
if not hasattr(self, 'save_stdin'):
self.save_files()
if not hasattr(self, 'delegate_stdin'):
self.make_delegate_files()
s = self.modules['sys']
s.stdin = self.restricted_stdin
s.stdout = self.restricted_stdout
s.stderr = self.restricted_stderr
sys.stdin = self.delegate_stdin
sys.stdout = self.delegate_stdout
sys.stderr = self.delegate_stderr
def reset_files(self):
self.restore_files()
s = self.modules['sys']
self.restricted_stdin = s.stdin
self.restricted_stdout = s.stdout
self.restricted_stderr = s.stderr
def save_files(self):
self.save_stdin = sys.stdin
self.save_stdout = sys.stdout
self.save_stderr = sys.stderr
def restore_files(self):
sys.stdin = self.save_stdin
sys.stdout = self.save_stdout
sys.stderr = self.save_stderr
def s_apply(self, func, args=(), kw={}):
self.save_files()
try:
self.set_files()
r = func(*args, **kw)
finally:
self.restore_files()
return r
def s_exec(self, *args):
"""Execute code within a restricted environment.
Similar to the r_exec() method, but the code will be granted access
to restricted versions of the standard I/O streams sys.stdin,
sys.stderr, and sys.stdout.
The code parameter must either be a string containing one or more
lines of Python code, or a compiled code object, which will be
executed in the restricted environment's __main__ module.
"""
return self.s_apply(self.r_exec, args)
def s_eval(self, *args):
"""Evaluate code within a restricted environment.
Similar to the r_eval() method, but the code will be granted access
to restricted versions of the standard I/O streams sys.stdin,
sys.stderr, and sys.stdout.
The code parameter must either be a string containing a Python
expression, or a compiled code object, which will be evaluated in
the restricted environment's __main__ module. The value of the
expression or code object will be returned.
"""
return self.s_apply(self.r_eval, args)
def s_execfile(self, *args):
"""Execute the Python code in the file in the restricted
environment's __main__ module.
Similar to the r_execfile() method, but the code will be granted
access to restricted versions of the standard I/O streams sys.stdin,
sys.stderr, and sys.stdout.
"""
return self.s_apply(self.r_execfile, args)
def s_import(self, *args):
"""Import a module, raising an ImportError exception if the module
is considered unsafe.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
Similar to the r_import() method, but has access to restricted
versions of the standard I/O streams sys.stdin, sys.stderr, and
sys.stdout.
"""
return self.s_apply(self.r_import, args)
def s_reload(self, *args):
"""Reload the module object, re-parsing and re-initializing it.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
Similar to the r_reload() method, but has access to restricted
versions of the standard I/O streams sys.stdin, sys.stderr, and
sys.stdout.
"""
return self.s_apply(self.r_reload, args)
def s_unload(self, *args):
"""Unload the module.
Removes it from the restricted environment's sys.modules dictionary.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
Similar to the r_unload() method, but has access to restricted
versions of the standard I/O streams sys.stdin, sys.stderr, and
sys.stdout.
"""
return self.s_apply(self.r_unload, args)
# Restricted open(...)
def r_open(self, file, mode='r', buf=-1):
"""Method called when open() is called in the restricted environment.
The arguments are identical to those of the open() function, and a
file object (or a class instance compatible with file objects)
        should be returned. RExec's default behaviour is to allow opening
        any file for reading, but to forbid any attempt to write a file.
This method is implicitly called by code executing in the
restricted environment. Overriding this method in a subclass is
used to change the policies enforced by a restricted environment.
"""
mode = str(mode)
if mode not in ('r', 'rb'):
raise IOError, "can't open files for writing in restricted mode"
return open(file, mode, buf)
# Restricted version of sys.exc_info()
def r_exc_info(self):
ty, va, tr = sys.exc_info()
tr = None
return ty, va, tr
def test():
import getopt, traceback
opts, args = getopt.getopt(sys.argv[1:], 'vt:')
verbose = 0
trusted = []
for o, a in opts:
if o == '-v':
verbose = verbose+1
if o == '-t':
trusted.append(a)
r = RExec(verbose=verbose)
if trusted:
r.ok_builtin_modules = r.ok_builtin_modules + tuple(trusted)
if args:
r.modules['sys'].argv = args
r.modules['sys'].path.insert(0, os.path.dirname(args[0]))
else:
r.modules['sys'].path.insert(0, "")
fp = sys.stdin
if args and args[0] != '-':
try:
fp = open(args[0])
except IOError, msg:
print "%s: can't open file %r" % (sys.argv[0], args[0])
return 1
if fp.isatty():
try:
import readline
except ImportError:
pass
import code
class RestrictedConsole(code.InteractiveConsole):
def runcode(self, co):
self.locals['__builtins__'] = r.modules['__builtin__']
r.s_apply(code.InteractiveConsole.runcode, (self, co))
try:
RestrictedConsole(r.modules['__main__'].__dict__).interact()
except SystemExit, n:
return n
else:
text = fp.read()
fp.close()
c = compile(text, fp.name, 'exec')
try:
r.s_exec(c)
except SystemExit, n:
return n
except:
traceback.print_exc()
return 1
if __name__ == '__main__':
sys.exit(test())
|
RPGOne/Skynet | refs/heads/Miho | pytorch-master/torch/legacy/nn/MultiLabelMarginCriterion.py | 1 | import torch
from .Criterion import Criterion
class MultiLabelMarginCriterion(Criterion):
def __init__(self, sizeAverage=True):
super(MultiLabelMarginCriterion, self).__init__()
self.sizeAverage = sizeAverage
self.isTarget = torch.Tensor()
self.output_tensor = None
def updateOutput(self, input, target):
if self.output_tensor is None:
self.output_tensor = input.new(1)
target = target.long()
self._backend.MultiLabelMarginCriterion_updateOutput(
self._backend.library_state,
input,
target,
self.output_tensor,
self.isTarget,
self.sizeAverage
)
self.output = self.output_tensor[0]
return self.output
def updateGradInput(self, input, target):
target = target.long()
self._backend.MultiLabelMarginCriterion_updateGradInput(
self._backend.library_state,
input,
target,
self.gradInput,
self.isTarget,
self.sizeAverage
)
return self.gradInput
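# Usage sketch for this legacy nn criterion (the tensors are hypothetical;
# `input` holds per-class scores, `target` holds padded label indices):
#
#   crit = MultiLabelMarginCriterion(sizeAverage=True)
#   loss = crit.updateOutput(input, target)           # forward pass
#   grad_input = crit.updateGradInput(input, target)  # backward pass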
|
cbrewster/servo | refs/heads/master | python/servo/lints/wpt_lint.py | 28 | # Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import os
import sys
from servo_tidy.tidy import LintRunner, filter_file
WPT_PATH = os.path.join(".", "tests", "wpt")
SUITES = ["web-platform-tests", os.path.join("mozilla", "tests")]
class Lint(LintRunner):
def _get_wpt_files(self, suite):
working_dir = os.path.join(WPT_PATH, suite, '')
file_iter = self.get_files(working_dir, exclude_dirs=[])
print '\nRunning the WPT lint on %s...' % working_dir
for f in file_iter:
if filter_file(f):
yield f[len(working_dir):]
def run(self):
if self.stylo:
return
wpt_working_dir = os.path.abspath(os.path.join(WPT_PATH, "web-platform-tests"))
for suite in SUITES:
files = self._get_wpt_files(suite)
sys.path.insert(0, wpt_working_dir)
from tools.lint import lint
sys.path.remove(wpt_working_dir)
file_dir = os.path.abspath(os.path.join(WPT_PATH, suite))
returncode = lint.lint(file_dir, list(files), output_format="json")
if returncode:
yield ("WPT Lint Tool", "", "lint error(s) in Web Platform Tests: exit status %s" % returncode)
|
chemelnucfin/tensorflow | refs/heads/master | tensorflow/python/keras/saving/save.py | 2 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras model saving code."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import six
from tensorflow.python import tf2
from tensorflow.python.keras.saving import hdf5_format
from tensorflow.python.keras.saving.saved_model import load as saved_model_load
from tensorflow.python.keras.saving.saved_model import save as saved_model_save
from tensorflow.python.saved_model import loader_impl
from tensorflow.python.util.tf_export import keras_export
# pylint: disable=g-import-not-at-top
try:
import h5py
except ImportError:
h5py = None
# pylint: enable=g-import-not-at-top
_HDF5_EXTENSIONS = ['.h5', '.hdf5', '.keras']
# TODO(kathywu): Remove this when Keras SavedModel is not experimental.
_KERAS_SAVED_MODEL_STILL_EXPERIMENTAL = True
@keras_export('keras.models.save_model')
def save_model(model,
filepath,
overwrite=True,
include_optimizer=True,
save_format=None,
signatures=None,
options=None):
"""Saves a model as a TensorFlow SavedModel or HDF5 file.
The saved model contains:
- the model's configuration (topology)
- the model's weights
- the model's optimizer's state (if any)
Thus the saved model can be reinstantiated in
the exact same state, without any of the code
used for model definition or training.
_SavedModel serialization_ (not yet added)
The SavedModel serialization path uses `tf.saved_model.save` to save the model
and all trackable objects attached to the model (e.g. layers and variables).
`@tf.function`-decorated methods are also saved. Additional trackable objects
and functions are added to the SavedModel to allow the model to be
loaded back as a Keras Model object.
Arguments:
model: Keras model instance to be saved.
filepath: One of the following:
- String, path where to save the model
- `h5py.File` object where to save the model
overwrite: Whether we should overwrite any existing model at the target
location, or instead ask the user with a manual prompt.
include_optimizer: If True, save optimizer's state together.
save_format: Either 'tf' or 'h5', indicating whether to save the model
to Tensorflow SavedModel or HDF5. Defaults to 'tf' in TF 2.X, and 'h5'
in TF 1.X.
signatures: Signatures to save with the SavedModel. Applicable to the 'tf'
format only. Please see the `signatures` argument in
`tf.saved_model.save` for details.
options: Optional `tf.saved_model.SaveOptions` object that specifies
options for saving to SavedModel.
Raises:
ImportError: If save format is hdf5, and h5py is not available.
"""
from tensorflow.python.keras.engine import sequential # pylint: disable=g-import-not-at-top
default_format = 'tf' if tf2.enabled() else 'h5'
save_format = save_format or default_format
if (save_format == 'h5' or
(h5py is not None and isinstance(filepath, h5py.File)) or
os.path.splitext(filepath)[1] in _HDF5_EXTENSIONS):
# TODO(b/130258301): add utility method for detecting model type.
if (not model._is_graph_network and # pylint:disable=protected-access
not isinstance(model, sequential.Sequential)):
raise NotImplementedError(
'Saving the model to HDF5 format requires the model to be a '
'Functional model or a Sequential model. It does not work for '
'subclassed models, because such models are defined via the body of '
'a Python method, which isn\'t safely serializable. Consider saving '
'to the Tensorflow SavedModel format (by setting save_format="tf") '
'or using `save_weights`.')
hdf5_format.save_model_to_hdf5(
model, filepath, overwrite, include_optimizer)
else:
saved_model_save.save(model, filepath, overwrite, include_optimizer,
signatures, options)
@keras_export('keras.models.load_model')
def load_model(filepath, custom_objects=None, compile=True): # pylint: disable=redefined-builtin
"""Loads a model saved via `save_model`.
Arguments:
filepath: One of the following:
- String, path to the saved model
- `h5py.File` object from which to load the model
custom_objects: Optional dictionary mapping names
(strings) to custom classes or functions to be
considered during deserialization.
compile: Boolean, whether to compile the model
after loading.
Returns:
A Keras model instance. If an optimizer was found
as part of the saved model, the model is already
compiled. Otherwise, the model is uncompiled and
a warning will be displayed. When `compile` is set
to False, the compilation is omitted without any
warning.
Raises:
ImportError: if loading from an hdf5 file and h5py is not available.
IOError: In case of an invalid savefile.
"""
if (h5py is not None and (
isinstance(filepath, h5py.File) or h5py.is_hdf5(filepath))):
return hdf5_format.load_model_from_hdf5(filepath, custom_objects, compile)
if isinstance(filepath, six.string_types):
loader_impl.parse_saved_model(filepath)
return saved_model_load.load(filepath, compile)
raise IOError(
'Unable to load model. Filepath is not an hdf5 file (or h5py is not '
'available) or SavedModel.')
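# Round-trip sketch using the two functions above (the path and `model`
# are hypothetical):
#
#   save_model(model, '/tmp/my_model', save_format='tf')
#   restored = load_model('/tmp/my_model')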
|
blademainer/intellij-community | refs/heads/master | python/testData/mover/insideStatement_afterUp.py | 83 | def foo(a, b):
if a:
pass
else:
a = 3
if b:
        pass
|
lisroach/PyRESTCONF | refs/heads/master | rest_calls/jsonRestClient.py | 1 | from rest_calls.restClient import RestCalls
class JSONRestCalls(RestCalls):
Format = 'json'
def __repr__(self):
return '%s(Session Object%s, Host = %s, Format = %s)' % (
self.__class__.__name__,
self._session.headers.items(),
self._host,
self.Format
)
|
WIPACrepo/iceprod | refs/heads/master | tests/core/data_transfer_test.py | 1 | """
Test script for core data_transfer
"""
from tests.util import unittest_reporter, glob_tests
import logging
logger = logging.getLogger('data_transfer_test')
import os
import sys
import time
import shutil
import tempfile
import random
import string
import subprocess
from functools import partial, reduce
try:
import cPickle as pickle
except ImportError:
import pickle
import unittest
from unittest.mock import patch, MagicMock
from tornado.testing import AsyncTestCase
from iceprod.core import to_log,constants
import iceprod.core.serialization
import iceprod.core.functions
import iceprod.core.exe
import iceprod.core.data_transfer
from iceprod.core.jsonUtil import json_encode,json_decode
class data_transfer_test(AsyncTestCase):
def setUp(self):
super(data_transfer_test,self).setUp()
self.test_dir = tempfile.mkdtemp(dir=os.getcwd())
curdir = os.getcwd()
os.symlink(os.path.join(curdir, 'iceprod'),
os.path.join(self.test_dir, 'iceprod'))
os.chdir(self.test_dir)
def cleanup():
os.chdir(curdir)
shutil.rmtree(self.test_dir)
self.addCleanup(cleanup)
# clean up environment
base_env = dict(os.environ)
def reset_env():
for k in set(os.environ).difference(base_env):
del os.environ[k]
for k in base_env:
os.environ[k] = base_env[k]
self.addCleanup(reset_env)
@unittest_reporter
def test_001_get_current_task(self):
config = {
'options': {},
'tasks': [
{'name': 'foo'},
],
}
with self.assertRaises(Exception):
iceprod.core.data_transfer.get_current_task(config)
config['options']['task'] = 'foo'
ret = iceprod.core.data_transfer.get_current_task(config)
self.assertEqual(ret, config['tasks'][0])
@patch('iceprod.core.exe.functions.download')
@patch('iceprod.core.exe.functions.upload')
@unittest_reporter(name='process - download')
async def test_100_process(self, upload, download):
config = iceprod.core.serialization.dict_to_dataclasses({
'options': {
'task': 'foo',
'data_url': 'http://localhost/',
},
'steering': {
'value': 'blah',
},
'tasks': [
{
'name': 'foo',
'iterations': 1,
'trays': [
{
'modules': [
{
'data': [
{
'movement':'input',
'remote': 'foo',
'local': 'bar',
},
],
},
],
},
],
},
],
})
async def d(url, local, **kwargs):
with open(local, 'w') as f:
f.write('test')
download.side_effect = d
await iceprod.core.data_transfer.process(config)
download.assert_called()
@patch('iceprod.core.exe.functions.download')
@patch('iceprod.core.exe.functions.upload')
@unittest_reporter(name='process - upload')
async def test_110_process(self, upload, download):
config = iceprod.core.serialization.dict_to_dataclasses({
'options': {
'task': 'foo',
'data_url': 'http://localhost/',
},
'steering': {
'value': 'blah',
},
'tasks': [
{
'name': 'foo',
'iterations': 1,
'trays': [
{
'modules': [
{
'data': [
{
'movement':'output',
'remote': 'foo',
'local': 'bar',
},
],
},
],
},
],
},
],
})
with open('bar', 'w') as f:
f.write('test')
await iceprod.core.data_transfer.process(config)
upload.assert_called()
@patch('iceprod.core.exe.functions.download')
@patch('iceprod.core.exe.functions.upload')
@unittest_reporter(name='process - both')
async def test_120_process(self, upload, download):
config = iceprod.core.serialization.dict_to_dataclasses({
'options': {
'task': 'foo',
'data_url': 'http://localhost/',
},
'steering': {
'value': 'blah',
},
'tasks': [
{
'name': 'foo',
'iterations': 1,
'trays': [
{
'modules': [
{
'data': [
{
'movement':'both',
'remote': 'foo',
'local': 'bar',
},
],
},
],
},
],
},
],
})
async def d(url, local, **kwargs):
with open(local, 'w') as f:
f.write('test')
download.side_effect = d
await iceprod.core.data_transfer.process(config)
download.assert_called()
upload.assert_called()
@patch('iceprod.core.exe.functions.download')
@patch('iceprod.core.exe.functions.upload')
@unittest_reporter(name='process - iterations')
async def test_200_process(self, upload, download):
config = iceprod.core.serialization.dict_to_dataclasses({
'options': {
'task': 'foo',
'data_url': 'http://localhost/',
},
'steering': {
'value': 'blah',
},
'tasks': [
{
'name': 'foo',
'trays': [
{
'iterations': 3,
'modules': [
{
'data': [
{
'movement':'output',
'remote': 'foo',
'local': 'bar.$(iter)',
},
],
},
],
},
],
},
],
})
for i in range(0,3):
with open(f'bar.{i}', 'w') as f:
f.write('test')
await iceprod.core.data_transfer.process(config)
self.assertEqual(upload.call_count, 3)
def load_tests(loader, tests, pattern):
suite = unittest.TestSuite()
alltests = glob_tests(loader.getTestCaseNames(data_transfer_test))
suite.addTests(loader.loadTestsFromNames(alltests,data_transfer_test))
return suite
|
dirtycold/git-cola | refs/heads/master | test/i18n_test.py | 3 | from __future__ import absolute_import, division, unicode_literals
import unittest
from cola import i18n
from cola.i18n import N_
from cola.compat import unichr
class ColaI18nTestCase(unittest.TestCase):
"""Test cases for the ColaApplication class"""
def tearDown(self):
i18n.uninstall()
def test_translates_noun(self):
"""Test that strings with @@noun are translated
"""
i18n.install('ja_JP')
expect = (unichr(0x30b3) + unichr(0x30df) +
unichr(0x30c3) + unichr(0x30c8))
        actual = N_('Commit@@noun')
self.assertEqual(expect, actual)
def test_translates_verb(self):
"""Test that strings with @@verb are translated
"""
i18n.install('de_DE')
expect = 'Version aufnehmen'
actual = N_('Commit@@verb')
self.assertEqual(expect, actual)
def test_translates_english_noun(self):
"""Test that English strings with @@noun are properly handled
"""
i18n.install('en_US.UTF-8')
expect = 'Commit'
actual = N_('Commit@@noun')
self.assertEqual(expect, actual)
def test_translates_english_verb(self):
"""Test that English strings with @@verb are properly handled
"""
i18n.install('en_US.UTF-8')
expect = 'Commit'
actual = N_('Commit@@verb')
self.assertEqual(expect, actual)
def test_translates_random_english(self):
"""Test that random English strings are passed through as-is
"""
i18n.install('en_US.UTF-8')
expect = 'Random'
actual = N_('Random')
self.assertEqual(expect, actual)
if __name__ == '__main__':
unittest.main()
|
Epirex/android_external_chromium_org | refs/heads/cm-11.0 | tools/telemetry/telemetry/core/platform/platform_backend.py | 23 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class PlatformBackend(object):
def IsRawDisplayFrameRateSupported(self):
return False
# pylint: disable=W0613
def StartRawDisplayFrameRateMeasurement(self):
raise NotImplementedError()
def StopRawDisplayFrameRateMeasurement(self):
raise NotImplementedError()
def GetRawDisplayFrameRateMeasurements(self):
raise NotImplementedError()
def SetFullPerformanceModeEnabled(self, enabled): # pylint: disable=W0613
pass
def CanMonitorThermalThrottling(self):
return False
def IsThermallyThrottled(self):
raise NotImplementedError()
def HasBeenThermallyThrottled(self):
raise NotImplementedError()
def GetSystemCommitCharge(self):
raise NotImplementedError()
def GetCpuStats(self, pid): # pylint: disable=W0613
return {}
def GetCpuTimestamp(self): # pylint: disable=W0613
return {}
def GetMemoryStats(self, pid): # pylint: disable=W0613
return {}
def GetIOStats(self, pid): # pylint: disable=W0613
return {}
def GetChildPids(self, pid): # pylint: disable=W0613
raise NotImplementedError()
def GetCommandLine(self, pid):
raise NotImplementedError()
def GetOSName(self):
raise NotImplementedError()
def GetOSVersionName(self):
return None
def CanFlushIndividualFilesFromSystemCache(self):
raise NotImplementedError()
def FlushEntireSystemCache(self):
raise NotImplementedError()
def FlushSystemCacheForDirectory(self, directory, ignoring=None):
raise NotImplementedError()
def LaunchApplication(self, application, parameters=None):
raise NotImplementedError()
def IsApplicationRunning(self, application):
raise NotImplementedError()
def CanLaunchApplication(self, application):
return False
def InstallApplication(self, application):
raise NotImplementedError()
def CanCaptureVideo(self):
return False
def StartVideoCapture(self, min_bitrate_mbps):
raise NotImplementedError()
def StopVideoCapture(self):
raise NotImplementedError()
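# Sketch of a minimal concrete backend; real implementations live in the
# platform-specific modules, so the subclass below is purely illustrative:
#
#   class FakePlatformBackend(PlatformBackend):
#     def GetOSName(self):
#       return 'fake'
#
#     def GetCpuStats(self, pid):
#       return {}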
|
franramirez688/common | refs/heads/develop | publish/publish_manager.py | 2 | from biicode.common.exception import (PublishException, UpToDatePublishException,
ForbiddenException, AuthenticationException)
from biicode.common.edition import changevalidator
from biicode.common.diffmerge.compare import compare
from biicode.common.model.symbolic.block_version import BlockVersion
from biicode.common.publish.publish_request import PublishRequest
from biicode.common.edition.processors.processor_changes import ProcessorChanges
from biicode.common.edition.block_holder import BIICODE_FILE
def update_config(new_parent_version, editionapi, hive_holder):
""" after a publication, the parent version has to be updated
"""
assert isinstance(new_parent_version, BlockVersion)
assert new_parent_version.time is not None
assert new_parent_version.time != -1
hive = hive_holder.hive
block_name = new_parent_version.block_name
block_holder = hive_holder[block_name]
block_holder.parent = new_parent_version
def commit_conf(block_holder):
new_resource = block_holder.commit_config()
if new_resource:
processor_changes = ProcessorChanges()
processor_changes.upsert(new_resource.name, new_resource.content, blob_changed=True)
hive.update(processor_changes)
editionapi.save_hive_changes(hive, processor_changes)
commit_conf(block_holder)
for block_holder in hive_holder.block_holders:
requirements = block_holder.requirements
if block_name in requirements:
requirements.add_version(new_parent_version)
commit_conf(block_holder)
def block_changed(changes, block_holder, other_holder):
if other_holder is None:
return True
if len(changes) == 1 and len(changes.modified) == 1 and BIICODE_FILE in changes.modified:
return block_holder.config.changed(other_holder.config)
return len(changes) > 0
def build_publish_request(biiapi, hive_holder, block_name, tag, msg,
versiontag, origin, biiout):
block_name, dep_block_names = _check_input(hive_holder, block_name)
_check_dep_blocks(biiapi, hive_holder, dep_block_names, biiout)
block_holder = hive_holder[block_name]
if not changevalidator.check_block_size(block_holder, biiout):
raise PublishException("Block is too large to be published")
parent = block_holder.parent
_check_possible(parent, biiapi, biiout)
if parent.time != -1: # Update
remote_block_holder = biiapi.get_block_holder(parent)
base_resources = remote_block_holder.resources
parent_delta_info = biiapi.get_version_delta_info(parent)
else: # New block
base_resources = None
parent_delta_info = None
remote_block_holder = None
changes = compare(base_resources, block_holder.resources)
if not block_changed(changes, block_holder, remote_block_holder):
if parent_delta_info and tag > parent_delta_info.tag:
biiout.info('No changes, promoting tag %s -> %s' % (parent_delta_info.tag, tag))
changes.modified.pop(BIICODE_FILE, None)
else:
raise UpToDatePublishException("Up to date, nothing to publish")
changes.deduce_renames()
request = PublishRequest()
request.parent = parent
request.changes = changes
if parent_delta_info:
request.parent_time = parent_delta_info.date
assert all(bv.time is not None for bv in block_holder.requirements.itervalues())
request.deptable = block_holder.requirements
request.tag = tag
request.msg = msg
request.versiontag = versiontag
request.origin = origin
return request
def _check_input(hive_holder, block_name):
    '''basic checks: block_name in hive and in block_graph, and the graph
    (just the SRC graph) has no cycles
    param block_name: must not be None (asserted below)
    return: (block_name, set(BlockName)) of dependent blocks in blocks folder'''
assert(block_name is not None)
if block_name not in hive_holder.blocks:
raise PublishException('Block "%s" does not exist in your project' % block_name)
hive_dependencies = hive_holder.hive.hive_dependencies
gr = hive_dependencies.version_graph
cycles = gr.get_cycles()
if cycles:
raise PublishException('There is a cycle between your blocks: %s\n'
'Please fix it. Aborting publication' % cycles)
if block_name not in gr.versions:
raise PublishException('Block %s not in current graph. Aborting publication.\n'
'This seems a biicode internal error, please contact us'
% block_name)
versions = gr.versions[block_name]
assert len(versions) == 1
version = iter(versions).next()
dependent_versions = gr.compute_closure(version)
dep_blocks = {v.block_name for v in dependent_versions if v.block_name in hive_holder.blocks}
return block_name, dep_blocks
def _check_dep_blocks(biiapi, hive_holder, dep_blocks, biiout):
'''check that the dependent blocks have no modifications'''
modified = False
parents = {b.parent.block_name: b.parent for b in hive_holder.block_holders}
for block_name in dep_blocks:
edition_block_holder = hive_holder[block_name]
# Modify requirements (only in memory) for comparison _block_changed below
requirements = edition_block_holder.requirements
for parent_name, parent in parents.iteritems():
if parent_name in requirements:
requirements.add_version(parent)
# Now check parents
parent = edition_block_holder.parent
if parent.time == -1:
modified = True
biiout.error('Block %s has never been published. Publish it first' % (block_name))
else:
remote_block_holder = biiapi.get_block_holder(parent)
changes = compare(remote_block_holder.resources, edition_block_holder.resources)
if block_changed(changes, edition_block_holder, remote_block_holder):
modified = True
biiout.error('Block %s is modified. Publish it first' % (block_name))
if modified:
raise PublishException('There are modified blocks that must be published first')
def _check_possible(parent, biiapi, biiout):
no_permissions_message = 'Unauthorized publication to "%s"' % str(parent)
try:
# If no logged username, can't publish, so force signin
biiapi.require_auth()
except (ForbiddenException, AuthenticationException):
raise PublishException(no_permissions_message)
try:
block_info = biiapi.get_block_info(parent.block)
private_str = "private" if block_info.private else "public "
biiout.info("*****************************")
biiout.info("***** Publishing %s****" % private_str)
biiout.info("*****************************")
except ForbiddenException:
raise PublishException(no_permissions_message)
if not block_info.can_write:
raise PublishException("You don't have permission to publish in %s" % str(parent))
if block_info.last_version.time != parent.time:
if block_info.last_version.time == -1:
raise PublishException("You are outdated, you are modifying %s\n"
" but the block is empty or has been deleted\n"
" Delete your [parent] to be able to publish again\n"
% (str(parent)))
else:
raise PublishException("You are outdated, you are modifying %s\n"
" but last version is %s\n"
" You can:\n"
" - 'bii update' to integrate changes\n"
" - modify your [parent] to discard last version\n"
" - close and open your block to discard your changes\n"
% (str(parent), str(block_info.last_version)))
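# Typical call sequence (the names `biiapi`, `editionapi`, `hive_holder`,
# `out` and the server-side publish call are assumptions for illustration):
#
#   request = build_publish_request(biiapi, hive_holder, block_name,
#                                   tag=tag, msg='fixes', versiontag='v1.0',
#                                   origin=None, biiout=out)
#   new_version = biiapi.publish(request)  # hypothetical server call
#   update_config(new_version, editionapi, hive_holder)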
|
Alexey95/physpy | refs/heads/master | setup.py | 1 | #!/usr/bin/python2
#-*- coding: utf-8 -*-
#
# This file is released under the MIT License.
#
# (C) Copyright 2012 Alessio Colucci
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the “Software”), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import physpy
classifiers = ["Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2"
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Scientific/Engineering :: Physics",
"Topic :: Software Development :: Libraries :: Python Modules"]
description = "A Python library to handle Physics and Maths operations."
email = "[email protected]"
long_description = """
physpy is a Python library to handle Physics and Maths operations.
Right now it handles only few basic Maths operations, like Polynomials and
gcd, but they will be expanded in the near future. Moreover, the existing
operations need to be refactored to simplify the whole library.""".strip()
setup(author=physpy.__author__,
author_email=email,
classifiers=classifiers,
      description=description,
license="MIT",
long_description=long_description,
      maintainer=physpy.__author__,
      maintainer_email=email,
name=physpy.__name__,
packages=["physpy"],
platforms=["any"],
test_suite="run_tests.suite") |
ChenJunor/hue | refs/heads/master | desktop/core/ext-py/lxml/src/lxml/html/defs.py | 36 | # FIXME: this should all be confirmed against what a DTD says
# (probably in a test; this may not match the DTD exactly, but we
# should document just how it differs).
# Data taken from http://www.w3.org/TR/html401/index/elements.html
try:
frozenset
except NameError:
from sets import Set as frozenset
empty_tags = frozenset([
'area', 'base', 'basefont', 'br', 'col', 'frame', 'hr',
'img', 'input', 'isindex', 'link', 'meta', 'param'])
deprecated_tags = frozenset([
'applet', 'basefont', 'center', 'dir', 'font', 'isindex',
'menu', 's', 'strike', 'u'])
# archive actually takes a space-separated list of URIs
link_attrs = frozenset([
'action', 'archive', 'background', 'cite', 'classid',
'codebase', 'data', 'href', 'longdesc', 'profile', 'src',
'usemap',
# Not standard:
'dynsrc', 'lowsrc',
])
# Not in the HTML 4 spec:
# onerror, onresize
event_attrs = frozenset([
'onblur', 'onchange', 'onclick', 'ondblclick', 'onerror',
'onfocus', 'onkeydown', 'onkeypress', 'onkeyup', 'onload',
'onmousedown', 'onmousemove', 'onmouseout', 'onmouseover',
'onmouseup', 'onreset', 'onresize', 'onselect', 'onsubmit',
'onunload',
])
safe_attrs = frozenset([
'abbr', 'accept', 'accept-charset', 'accesskey', 'action', 'align',
'alt', 'axis', 'border', 'cellpadding', 'cellspacing', 'char', 'charoff',
'charset', 'checked', 'cite', 'class', 'clear', 'cols', 'colspan',
'color', 'compact', 'coords', 'datetime', 'dir', 'disabled', 'enctype',
'for', 'frame', 'headers', 'height', 'href', 'hreflang', 'hspace', 'id',
'ismap', 'label', 'lang', 'longdesc', 'maxlength', 'media', 'method',
'multiple', 'name', 'nohref', 'noshade', 'nowrap', 'prompt', 'readonly',
'rel', 'rev', 'rows', 'rowspan', 'rules', 'scope', 'selected', 'shape',
'size', 'span', 'src', 'start', 'summary', 'tabindex', 'target', 'title',
'type', 'usemap', 'valign', 'value', 'vspace', 'width'])
# From http://htmlhelp.com/reference/html40/olist.html
top_level_tags = frozenset([
'html', 'head', 'body', 'frameset',
])
head_tags = frozenset([
'base', 'isindex', 'link', 'meta', 'script', 'style', 'title',
])
general_block_tags = frozenset([
'address',
'blockquote',
'center',
'del',
'div',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
'hr',
'ins',
'isindex',
'noscript',
'p',
'pre',
])
list_tags = frozenset([
'dir', 'dl', 'dt', 'dd', 'li', 'menu', 'ol', 'ul',
])
table_tags = frozenset([
'table', 'caption', 'colgroup', 'col',
'thead', 'tfoot', 'tbody', 'tr', 'td', 'th',
])
# just this one from
# http://www.georgehernandez.com/h/XComputers/HTML/2BlockLevel.htm
block_tags = general_block_tags | list_tags | table_tags | frozenset([
# Partial form tags
'fieldset', 'form', 'legend', 'optgroup', 'option',
])
form_tags = frozenset([
'form', 'button', 'fieldset', 'legend', 'input', 'label',
'select', 'optgroup', 'option', 'textarea',
])
special_inline_tags = frozenset([
'a', 'applet', 'basefont', 'bdo', 'br', 'embed', 'font', 'iframe',
'img', 'map', 'area', 'object', 'param', 'q', 'script',
'span', 'sub', 'sup',
])
phrase_tags = frozenset([
'abbr', 'acronym', 'cite', 'code', 'del', 'dfn', 'em',
'ins', 'kbd', 'samp', 'strong', 'var',
])
font_style_tags = frozenset([
'b', 'big', 'i', 's', 'small', 'strike', 'tt', 'u',
])
frame_tags = frozenset([
'frameset', 'frame', 'noframes',
])
# These tags aren't standard
nonstandard_tags = frozenset(['blink', 'marque'])
tags = (top_level_tags | head_tags | general_block_tags | list_tags
| table_tags | form_tags | special_inline_tags | phrase_tags
| font_style_tags | nonstandard_tags)
|
jss-emr/openerp-7-src | refs/heads/master | openerp/cli/server.py | 3 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
OpenERP - Server
OpenERP is an ERP+CRM program for small and medium businesses.
The whole source code is distributed under the terms of the
GNU Public Licence.
(c) 2003-TODAY, Fabien Pinckaers - OpenERP SA
"""
import logging
import os
import signal
import sys
import threading
import traceback
import time
import openerp
from . import Command
__author__ = openerp.release.author
__version__ = openerp.release.version
# Also use the `openerp` logger for the main script.
_logger = logging.getLogger('openerp')
def check_root_user():
""" Exit if the process's user is 'root' (on POSIX system)."""
if os.name == 'posix':
import pwd
if pwd.getpwuid(os.getuid())[0] == 'root' :
sys.stderr.write("Running as user 'root' is a security risk, aborting.\n")
sys.exit(1)
def check_postgres_user():
""" Exit if the configured database user is 'postgres'.
This function assumes the configuration has been initialized.
"""
config = openerp.tools.config
if config['db_user'] == 'postgres':
sys.stderr.write("Using the database user 'postgres' is a security risk, aborting.")
sys.exit(1)
def report_configuration():
""" Log the server version and some configuration values.
This function assumes the configuration has been initialized.
"""
config = openerp.tools.config
_logger.info("OpenERP version %s", __version__)
for name, value in [('addons paths', config['addons_path']),
('database hostname', config['db_host'] or 'localhost'),
('database port', config['db_port'] or '5432'),
('database user', config['db_user'])]:
_logger.info("%s: %s", name, value)
def setup_pid_file():
""" Create a file with the process id written in it.
This function assumes the configuration has been initialized.
"""
config = openerp.tools.config
if config['pidfile']:
fd = open(config['pidfile'], 'w')
pidtext = "%d" % (os.getpid())
fd.write(pidtext)
fd.close()
def preload_registry(dbname):
""" Preload a registry, and start the cron."""
try:
update_module = True if openerp.tools.config['init'] or openerp.tools.config['update'] else False
db, registry = openerp.pooler.get_db_and_pool(dbname,update_module=update_module)
except Exception:
_logger.exception('Failed to initialize database `%s`.', dbname)
def run_test_file(dbname, test_file):
""" Preload a registry, possibly run a test file, and start the cron."""
try:
config = openerp.tools.config
db, registry = openerp.pooler.get_db_and_pool(dbname, update_module=config['init'] or config['update'])
cr = db.cursor()
_logger.info('loading test file %s', test_file)
openerp.tools.convert_yaml_import(cr, 'base', file(test_file), 'test', {}, 'test', True)
cr.rollback()
cr.close()
except Exception:
_logger.exception('Failed to initialize database `%s` and run test file `%s`.', dbname, test_file)
def export_translation():
config = openerp.tools.config
dbname = config['db_name']
if config["language"]:
msg = "language %s" % (config["language"],)
else:
msg = "new language"
_logger.info('writing translation file for %s to %s', msg,
config["translate_out"])
fileformat = os.path.splitext(config["translate_out"])[-1][1:].lower()
buf = file(config["translate_out"], "w")
cr = openerp.pooler.get_db(dbname).cursor()
openerp.tools.trans_export(config["language"],
config["translate_modules"] or ["all"], buf, fileformat, cr)
cr.close()
buf.close()
_logger.info('translation file written successfully')
def import_translation():
config = openerp.tools.config
context = {'overwrite': config["overwrite_existing_translations"]}
dbname = config['db_name']
cr = openerp.pooler.get_db(dbname).cursor()
    openerp.tools.trans_load(cr, config["translate_in"], config["language"],
context=context)
cr.commit()
cr.close()
# Variable keeping track of the number of calls to the signal handler defined
# below. This variable is monitored by ``quit_on_signals()``.
quit_signals_received = 0
def signal_handler(sig, frame):
""" Signal handler: exit ungracefully on the second handled signal.
:param sig: the signal number
:param frame: the interrupted stack frame or None
"""
global quit_signals_received
quit_signals_received += 1
if quit_signals_received > 1:
# logging.shutdown was already called at this point.
sys.stderr.write("Forced shutdown.\n")
os._exit(0)
def dumpstacks(sig, frame):
""" Signal handler: dump a stack trace for each existing thread."""
# code from http://stackoverflow.com/questions/132058/getting-stack-trace-from-a-running-python-application#answer-2569696
# modified for python 2.5 compatibility
threads_info = dict([(th.ident, {'name': th.name,
'uid': getattr(th,'uid','n/a')})
for th in threading.enumerate()])
code = []
for threadId, stack in sys._current_frames().items():
thread_info = threads_info.get(threadId)
code.append("\n# Thread: %s (id:%s) (uid:%s)" % \
(thread_info and thread_info['name'] or 'n/a',
threadId,
thread_info and thread_info['uid'] or 'n/a'))
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
code.append(" %s" % (line.strip()))
_logger.info("\n".join(code))
def setup_signal_handlers():
""" Register the signal handler defined above. """
SIGNALS = map(lambda x: getattr(signal, "SIG%s" % x), "INT TERM".split())
if os.name == 'posix':
map(lambda sig: signal.signal(sig, signal_handler), SIGNALS)
signal.signal(signal.SIGQUIT, dumpstacks)
elif os.name == 'nt':
import win32api
win32api.SetConsoleCtrlHandler(lambda sig: signal_handler(sig, None), 1)
def quit_on_signals():
""" Wait for one or two signals then shutdown the server.
The first SIGINT or SIGTERM signal will initiate a graceful shutdown while
    a second one, if any, will force an immediate exit.
"""
# Wait for a first signal to be handled. (time.sleep will be interrupted
# by the signal handler.) The try/except is for the win32 case.
try:
while quit_signals_received == 0:
time.sleep(60)
except KeyboardInterrupt:
pass
config = openerp.tools.config
openerp.service.stop_services()
if getattr(openerp, 'phoenix', False):
# like the phoenix, reborn from ashes...
openerp.service._reexec()
return
if config['pidfile']:
os.unlink(config['pidfile'])
sys.exit(0)
def configure_babel_localedata_path():
# Workaround: py2exe and babel.
if hasattr(sys, 'frozen'):
import babel
babel.localedata._dirname = os.path.join(os.path.dirname(sys.executable), 'localedata')
def main(args):
os.environ["TZ"] = "UTC"
check_root_user()
openerp.tools.config.parse_config(args)
check_postgres_user()
openerp.netsvc.init_logger()
report_configuration()
config = openerp.tools.config
configure_babel_localedata_path()
setup_signal_handlers()
if config["test_file"]:
run_test_file(config['db_name'], config['test_file'])
sys.exit(0)
if config["translate_out"]:
export_translation()
sys.exit(0)
if config["translate_in"]:
import_translation()
sys.exit(0)
if not config["stop_after_init"]:
setup_pid_file()
    # Some modules register themselves when they are loaded so we need the
# services to be running before loading any registry.
if config['workers']:
openerp.service.start_services_workers()
else:
openerp.service.start_services()
if config['db_name']:
for dbname in config['db_name'].split(','):
preload_registry(dbname)
if config["stop_after_init"]:
sys.exit(0)
_logger.info('OpenERP server is running, waiting for connections...')
quit_on_signals()
class Server(Command):
def run(self, args):
main(args)
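# Hedged direct-run sketch; the usual entry point is openerp.cli's command
# registry, which dispatches to the `Server` command above.
if __name__ == '__main__':
    main(sys.argv[1:])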
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
daxxi13/CouchPotatoServer | refs/heads/develop | libs/oauthlib/oauth1/rfc5849/__init__.py | 112 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
"""
oauthlib.oauth1.rfc5849
~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
import logging
import urlparse
from oauthlib.common import Request, urlencode
from . import parameters, signature, utils
SIGNATURE_HMAC = u"HMAC-SHA1"
SIGNATURE_RSA = u"RSA-SHA1"
SIGNATURE_PLAINTEXT = u"PLAINTEXT"
SIGNATURE_METHODS = (SIGNATURE_HMAC, SIGNATURE_RSA, SIGNATURE_PLAINTEXT)
SIGNATURE_TYPE_AUTH_HEADER = u'AUTH_HEADER'
SIGNATURE_TYPE_QUERY = u'QUERY'
SIGNATURE_TYPE_BODY = u'BODY'
CONTENT_TYPE_FORM_URLENCODED = u'application/x-www-form-urlencoded'
class Client(object):
"""A client used to sign OAuth 1.0 RFC 5849 requests"""
def __init__(self, client_key,
client_secret=None,
resource_owner_key=None,
resource_owner_secret=None,
callback_uri=None,
signature_method=SIGNATURE_HMAC,
signature_type=SIGNATURE_TYPE_AUTH_HEADER,
rsa_key=None, verifier=None):
self.client_key = client_key
self.client_secret = client_secret
self.resource_owner_key = resource_owner_key
self.resource_owner_secret = resource_owner_secret
self.signature_method = signature_method
self.signature_type = signature_type
self.callback_uri = callback_uri
self.rsa_key = rsa_key
self.verifier = verifier
if self.signature_method == SIGNATURE_RSA and self.rsa_key is None:
raise ValueError('rsa_key is required when using RSA signature method.')
def get_oauth_signature(self, request):
"""Get an OAuth signature to be used in signing a request
"""
if self.signature_method == SIGNATURE_PLAINTEXT:
# fast-path
return signature.sign_plaintext(self.client_secret,
self.resource_owner_secret)
uri, headers, body = self._render(request)
collected_params = signature.collect_parameters(
uri_query=urlparse.urlparse(uri).query,
body=body,
headers=headers)
logging.debug("Collected params: {0}".format(collected_params))
normalized_params = signature.normalize_parameters(collected_params)
normalized_uri = signature.normalize_base_string_uri(request.uri)
logging.debug("Normalized params: {0}".format(normalized_params))
logging.debug("Normalized URI: {0}".format(normalized_uri))
base_string = signature.construct_base_string(request.http_method,
normalized_uri, normalized_params)
logging.debug("Base signing string: {0}".format(base_string))
if self.signature_method == SIGNATURE_HMAC:
sig = signature.sign_hmac_sha1(base_string, self.client_secret,
self.resource_owner_secret)
elif self.signature_method == SIGNATURE_RSA:
sig = signature.sign_rsa_sha1(base_string, self.rsa_key)
else:
sig = signature.sign_plaintext(self.client_secret,
self.resource_owner_secret)
logging.debug("Signature: {0}".format(sig))
return sig
def get_oauth_params(self):
"""Get the basic OAuth parameters to be used in generating a signature.
"""
params = [
(u'oauth_nonce', utils.generate_nonce()),
(u'oauth_timestamp', utils.generate_timestamp()),
(u'oauth_version', u'1.0'),
(u'oauth_signature_method', self.signature_method),
(u'oauth_consumer_key', self.client_key),
]
if self.resource_owner_key:
params.append((u'oauth_token', self.resource_owner_key))
if self.callback_uri:
params.append((u'oauth_callback', self.callback_uri))
if self.verifier:
params.append((u'oauth_verifier', self.verifier))
return params
def _render(self, request, formencode=False):
"""Render a signed request according to signature type
Returns a 3-tuple containing the request URI, headers, and body.
If the formencode argument is True and the body contains parameters, it
is escaped and returned as a valid formencoded string.
"""
# TODO what if there are body params on a header-type auth?
# TODO what if there are query params on a body-type auth?
uri, headers, body = request.uri, request.headers, request.body
# TODO: right now these prepare_* methods are very narrow in scope--they
# only affect their little thing. In some cases (for example, with
# header auth) it might be advantageous to allow these methods to touch
# other parts of the request, like the headers—so the prepare_headers
# method could also set the Content-Type header to x-www-form-urlencoded
# like the spec requires. This would be a fundamental change though, and
# I'm not sure how I feel about it.
if self.signature_type == SIGNATURE_TYPE_AUTH_HEADER:
headers = parameters.prepare_headers(request.oauth_params, request.headers)
elif self.signature_type == SIGNATURE_TYPE_BODY and request.decoded_body is not None:
body = parameters.prepare_form_encoded_body(request.oauth_params, request.decoded_body)
if formencode:
body = urlencode(body)
headers['Content-Type'] = u'application/x-www-form-urlencoded'
elif self.signature_type == SIGNATURE_TYPE_QUERY:
uri = parameters.prepare_request_uri_query(request.oauth_params, request.uri)
else:
raise ValueError('Unknown signature type specified.')
return uri, headers, body
def sign(self, uri, http_method=u'GET', body=None, headers=None):
"""Sign a request
Signs an HTTP request with the specified parts.
Returns a 3-tuple of the signed request's URI, headers, and body.
Note that http_method is not returned as it is unaffected by the OAuth
signing process.
The body argument may be a dict, a list of 2-tuples, or a formencoded
string. The Content-Type header must be 'application/x-www-form-urlencoded'
if it is present.
If the body argument is not one of the above, it will be returned
verbatim as it is unaffected by the OAuth signing process. Attempting to
sign a request with non-formencoded data using the OAuth body signature
type is invalid and will raise an exception.
If the body does contain parameters, it will be returned as a properly-
formatted formencoded string.
All string data MUST be unicode. This includes strings inside body
dicts, for example.
"""
# normalize request data
request = Request(uri, http_method, body, headers)
# sanity check
content_type = request.headers.get('Content-Type', None)
multipart = content_type and content_type.startswith('multipart/')
should_have_params = content_type == CONTENT_TYPE_FORM_URLENCODED
has_params = request.decoded_body is not None
# 3.4.1.3.1. Parameter Sources
# [Parameters are collected from the HTTP request entity-body, but only
# if [...]:
# * The entity-body is single-part.
if multipart and has_params:
raise ValueError("Headers indicate a multipart body but body contains parameters.")
# * The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
elif should_have_params and not has_params:
raise ValueError("Headers indicate a formencoded body but body was not decodable.")
# * The HTTP request entity-header includes the "Content-Type"
# header field set to "application/x-www-form-urlencoded".
elif not should_have_params and has_params:
raise ValueError("Body contains parameters but Content-Type header was not set.")
# 3.5.2. Form-Encoded Body
# Protocol parameters can be transmitted in the HTTP request entity-
# body, but only if the following REQUIRED conditions are met:
# o The entity-body is single-part.
# o The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
# o The HTTP request entity-header includes the "Content-Type" header
# field set to "application/x-www-form-urlencoded".
elif self.signature_type == SIGNATURE_TYPE_BODY and not (
should_have_params and has_params and not multipart):
raise ValueError('Body signatures may only be used with form-urlencoded content')
# generate the basic OAuth parameters
request.oauth_params = self.get_oauth_params()
# generate the signature
request.oauth_params.append((u'oauth_signature', self.get_oauth_signature(request)))
# render the signed request and return it
return self._render(request, formencode=True)
class Server(object):
"""A server used to verify OAuth 1.0 RFC 5849 requests"""
def __init__(self, signature_method=SIGNATURE_HMAC, rsa_key=None):
self.signature_method = signature_method
self.rsa_key = rsa_key
def get_client_secret(self, client_key):
raise NotImplementedError("Subclasses must implement this function.")
def get_resource_owner_secret(self, resource_owner_key):
raise NotImplementedError("Subclasses must implement this function.")
def get_signature_type_and_params(self, uri_query, headers, body):
signature_types_with_oauth_params = filter(lambda s: s[1], (
(SIGNATURE_TYPE_AUTH_HEADER, utils.filter_oauth_params(
signature.collect_parameters(headers=headers,
exclude_oauth_signature=False))),
(SIGNATURE_TYPE_BODY, utils.filter_oauth_params(
signature.collect_parameters(body=body,
exclude_oauth_signature=False))),
(SIGNATURE_TYPE_QUERY, utils.filter_oauth_params(
signature.collect_parameters(uri_query=uri_query,
exclude_oauth_signature=False))),
))
if len(signature_types_with_oauth_params) > 1:
raise ValueError('oauth_ params must come from only 1 signature type but were found in %s' % ', '.join(
[s[0] for s in signature_types_with_oauth_params]))
try:
signature_type, params = signature_types_with_oauth_params[0]
except IndexError:
raise ValueError('oauth_ params are missing. Could not determine signature type.')
return signature_type, dict(params)
def check_client_key(self, client_key):
raise NotImplementedError("Subclasses must implement this function.")
def check_resource_owner_key(self, client_key, resource_owner_key):
raise NotImplementedError("Subclasses must implement this function.")
def check_timestamp_and_nonce(self, timestamp, nonce):
raise NotImplementedError("Subclasses must implement this function.")
def check_request_signature(self, uri, http_method=u'GET', body='',
headers=None):
"""Check a request's supplied signature to make sure the request is
valid.
Servers should return HTTP status 400 if a ValueError exception
is raised and HTTP status 401 on return value False.
Per `section 3.2`_ of the spec.
.. _`section 3.2`: http://tools.ietf.org/html/rfc5849#section-3.2
"""
headers = headers or {}
signature_type = None
# FIXME: urlparse does not return unicode!
uri_query = urlparse.urlparse(uri).query
signature_type, params = self.get_signature_type_and_params(uri_query,
headers, body)
# the parameters may not include duplicate oauth entries
filtered_params = utils.filter_oauth_params(params)
if len(filtered_params) != len(params):
raise ValueError("Duplicate OAuth entries.")
params = dict(params)
request_signature = params.get(u'oauth_signature')
client_key = params.get(u'oauth_consumer_key')
resource_owner_key = params.get(u'oauth_token')
nonce = params.get(u'oauth_nonce')
timestamp = params.get(u'oauth_timestamp')
callback_uri = params.get(u'oauth_callback')
verifier = params.get(u'oauth_verifier')
signature_method = params.get(u'oauth_signature_method')
# ensure all mandatory parameters are present
if not all((request_signature, client_key, nonce,
timestamp, signature_method)):
raise ValueError("Missing OAuth parameters.")
# if version is supplied, it must be "1.0"
if u'oauth_version' in params and params[u'oauth_version'] != u'1.0':
raise ValueError("Invalid OAuth version.")
# signature method must be valid
        if signature_method not in SIGNATURE_METHODS:
raise ValueError("Invalid signature method.")
# ensure client key is valid
if not self.check_client_key(client_key):
return False
# ensure resource owner key is valid and not expired
if not self.check_resource_owner_key(client_key, resource_owner_key):
return False
# ensure the nonce and timestamp haven't been used before
if not self.check_timestamp_and_nonce(timestamp, nonce):
return False
# FIXME: extract realm, then self.check_realm
# oauth_client parameters depend on client chosen signature method
# which may vary for each request, section 3.4
# HMAC-SHA1 and PLAINTEXT share parameters
if signature_method == SIGNATURE_RSA:
oauth_client = Client(client_key,
resource_owner_key=resource_owner_key,
callback_uri=callback_uri,
signature_method=signature_method,
signature_type=signature_type,
rsa_key=self.rsa_key, verifier=verifier)
else:
client_secret = self.get_client_secret(client_key)
resource_owner_secret = self.get_resource_owner_secret(
resource_owner_key)
oauth_client = Client(client_key,
client_secret=client_secret,
resource_owner_key=resource_owner_key,
resource_owner_secret=resource_owner_secret,
callback_uri=callback_uri,
signature_method=signature_method,
signature_type=signature_type,
verifier=verifier)
request = Request(uri, http_method, body, headers)
request.oauth_params = params
client_signature = oauth_client.get_oauth_signature(request)
# FIXME: use near constant time string compare to avoid timing attacks
return client_signature == request_signature
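# Hedged usage sketch for Client.sign (HMAC-SHA1, auth-header signature
# type). The key and secret are placeholders, not real credentials, and the
# package-relative imports above mean this module is not run directly in
# practice; the snippet only illustrates the call shape.
if __name__ == '__main__':
    client = Client(u'placeholder_key', client_secret=u'placeholder_secret')
    uri, headers, body = client.sign(u'http://example.com/resource?a=1')
    print(headers[u'Authorization'])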
|
nwjs/chromium.src | refs/heads/nw45-log | third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_inputstream.py | 45 | from __future__ import absolute_import, division, unicode_literals
from six import text_type, binary_type
from six.moves import http_client, urllib
import codecs
import re
import webencodings
from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
from .constants import _ReparseException
from . import _utils
from io import StringIO
try:
from io import BytesIO
except ImportError:
BytesIO = StringIO
# Non-unicode versions of constants for use in the pre-parser
spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters])
asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters])
asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase])
spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"])
invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa
if _utils.supports_lone_surrogates:
# Use one extra step of indirection and create surrogates with
# eval. Not using this indirection would introduce an illegal
# unicode literal on platforms not supporting such lone
# surrogates.
assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1
invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] +
eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used
"]")
else:
invalid_unicode_re = re.compile(invalid_unicode_no_surrogate)
non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,
0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,
0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF,
0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF,
0x10FFFE, 0x10FFFF])
ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]")
# Cache for charsUntil()
charsUntilRegEx = {}
class BufferedStream(object):
"""Buffering for streams that do not have buffering of their own
The buffer is implemented as a list of chunks on the assumption that
joining many strings will be slow since it is O(n**2)
"""
def __init__(self, stream):
self.stream = stream
self.buffer = []
self.position = [-1, 0] # chunk number, offset
def tell(self):
pos = 0
for chunk in self.buffer[:self.position[0]]:
pos += len(chunk)
pos += self.position[1]
return pos
def seek(self, pos):
assert pos <= self._bufferedBytes()
offset = pos
i = 0
while len(self.buffer[i]) < offset:
offset -= len(self.buffer[i])
i += 1
self.position = [i, offset]
def read(self, bytes):
if not self.buffer:
return self._readStream(bytes)
elif (self.position[0] == len(self.buffer) and
self.position[1] == len(self.buffer[-1])):
return self._readStream(bytes)
else:
return self._readFromBuffer(bytes)
def _bufferedBytes(self):
return sum([len(item) for item in self.buffer])
def _readStream(self, bytes):
data = self.stream.read(bytes)
self.buffer.append(data)
self.position[0] += 1
self.position[1] = len(data)
return data
def _readFromBuffer(self, bytes):
remainingBytes = bytes
rv = []
bufferIndex = self.position[0]
bufferOffset = self.position[1]
while bufferIndex < len(self.buffer) and remainingBytes != 0:
assert remainingBytes > 0
bufferedData = self.buffer[bufferIndex]
if remainingBytes <= len(bufferedData) - bufferOffset:
bytesToRead = remainingBytes
self.position = [bufferIndex, bufferOffset + bytesToRead]
else:
bytesToRead = len(bufferedData) - bufferOffset
self.position = [bufferIndex, len(bufferedData)]
bufferIndex += 1
rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead])
remainingBytes -= bytesToRead
bufferOffset = 0
if remainingBytes:
rv.append(self._readStream(remainingBytes))
return b"".join(rv)
def HTMLInputStream(source, **kwargs):
# Work around Python bug #20007: read(0) closes the connection.
# http://bugs.python.org/issue20007
if (isinstance(source, http_client.HTTPResponse) or
# Also check for addinfourl wrapping HTTPResponse
(isinstance(source, urllib.response.addbase) and
isinstance(source.fp, http_client.HTTPResponse))):
isUnicode = False
elif hasattr(source, "read"):
isUnicode = isinstance(source.read(0), text_type)
else:
isUnicode = isinstance(source, text_type)
if isUnicode:
encodings = [x for x in kwargs if x.endswith("_encoding")]
if encodings:
raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings)
return HTMLUnicodeInputStream(source, **kwargs)
else:
return HTMLBinaryInputStream(source, **kwargs)
class HTMLUnicodeInputStream(object):
"""Provides a unicode stream of characters to the HTMLTokenizer.
This class takes care of character encoding and removing or replacing
incorrect byte-sequences and also provides column and line tracking.
"""
_defaultChunkSize = 10240
def __init__(self, source):
"""Initialises the HTMLInputStream.
HTMLInputStream(source, [encoding]) -> Normalized stream from source
for use by html5lib.
source can be either a file-object, local filename or a string.
The optional encoding parameter must be a string that indicates
the encoding. If specified, that encoding will be used,
regardless of any BOM or later declaration (such as in a meta
element)
"""
if not _utils.supports_lone_surrogates:
# Such platforms will have already checked for such
# surrogate errors, so no need to do this checking.
self.reportCharacterErrors = None
elif len("\U0010FFFF") == 1:
self.reportCharacterErrors = self.characterErrorsUCS4
else:
self.reportCharacterErrors = self.characterErrorsUCS2
# List of where new lines occur
self.newLines = [0]
self.charEncoding = (lookupEncoding("utf-8"), "certain")
self.dataStream = self.openStream(source)
self.reset()
def reset(self):
self.chunk = ""
self.chunkSize = 0
self.chunkOffset = 0
self.errors = []
# number of (complete) lines in previous chunks
self.prevNumLines = 0
# number of columns in the last line of the previous chunk
self.prevNumCols = 0
# Deal with CR LF and surrogates split over chunk boundaries
self._bufferedCharacter = None
def openStream(self, source):
"""Produces a file object from source.
source can be either a file object, local filename or a string.
"""
# Already a file object
if hasattr(source, 'read'):
stream = source
else:
stream = StringIO(source)
return stream
def _position(self, offset):
chunk = self.chunk
nLines = chunk.count('\n', 0, offset)
positionLine = self.prevNumLines + nLines
lastLinePos = chunk.rfind('\n', 0, offset)
if lastLinePos == -1:
positionColumn = self.prevNumCols + offset
else:
positionColumn = offset - (lastLinePos + 1)
return (positionLine, positionColumn)
def position(self):
"""Returns (line, col) of the current position in the stream."""
line, col = self._position(self.chunkOffset)
return (line + 1, col)
def char(self):
""" Read one character from the stream or queue if available. Return
EOF when EOF is reached.
"""
# Read a new chunk from the input stream if necessary
if self.chunkOffset >= self.chunkSize:
if not self.readChunk():
return EOF
chunkOffset = self.chunkOffset
char = self.chunk[chunkOffset]
self.chunkOffset = chunkOffset + 1
return char
def readChunk(self, chunkSize=None):
if chunkSize is None:
chunkSize = self._defaultChunkSize
self.prevNumLines, self.prevNumCols = self._position(self.chunkSize)
self.chunk = ""
self.chunkSize = 0
self.chunkOffset = 0
data = self.dataStream.read(chunkSize)
# Deal with CR LF and surrogates broken across chunks
if self._bufferedCharacter:
data = self._bufferedCharacter + data
self._bufferedCharacter = None
elif not data:
# We have no more data, bye-bye stream
return False
if len(data) > 1:
lastv = ord(data[-1])
if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF:
self._bufferedCharacter = data[-1]
data = data[:-1]
if self.reportCharacterErrors:
self.reportCharacterErrors(data)
# Replace invalid characters
data = data.replace("\r\n", "\n")
data = data.replace("\r", "\n")
self.chunk = data
self.chunkSize = len(data)
return True
def characterErrorsUCS4(self, data):
for _ in range(len(invalid_unicode_re.findall(data))):
self.errors.append("invalid-codepoint")
def characterErrorsUCS2(self, data):
# Someone picked the wrong compile option
# You lose
skip = False
for match in invalid_unicode_re.finditer(data):
            if skip:
                # consume the low half of a surrogate pair, then resume checking
                skip = False
                continue
codepoint = ord(match.group())
pos = match.start()
# Pretty sure there should be endianness issues here
if _utils.isSurrogatePair(data[pos:pos + 2]):
# We have a surrogate pair!
char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2])
if char_val in non_bmp_invalid_codepoints:
self.errors.append("invalid-codepoint")
skip = True
elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and
pos == len(data) - 1):
self.errors.append("invalid-codepoint")
else:
skip = False
self.errors.append("invalid-codepoint")
def charsUntil(self, characters, opposite=False):
""" Returns a string of characters from the stream up to but not
including any character in 'characters' or EOF. 'characters' must be
a container that supports the 'in' method and iteration over its
characters.
"""
# Use a cache of regexps to find the required characters
try:
chars = charsUntilRegEx[(characters, opposite)]
except KeyError:
if __debug__:
for c in characters:
assert(ord(c) < 128)
regex = "".join(["\\x%02x" % ord(c) for c in characters])
if not opposite:
regex = "^%s" % regex
chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex)
rv = []
while True:
# Find the longest matching prefix
m = chars.match(self.chunk, self.chunkOffset)
if m is None:
# If nothing matched, and it wasn't because we ran out of chunk,
# then stop
if self.chunkOffset != self.chunkSize:
break
else:
end = m.end()
# If not the whole chunk matched, return everything
# up to the part that didn't match
if end != self.chunkSize:
rv.append(self.chunk[self.chunkOffset:end])
self.chunkOffset = end
break
# If the whole remainder of the chunk matched,
# use it all and read the next chunk
rv.append(self.chunk[self.chunkOffset:])
if not self.readChunk():
# Reached EOF
break
r = "".join(rv)
return r
def unget(self, char):
# Only one character is allowed to be ungotten at once - it must
# be consumed again before any further call to unget
if char is not None:
if self.chunkOffset == 0:
# unget is called quite rarely, so it's a good idea to do
# more work here if it saves a bit of work in the frequently
# called char and charsUntil.
# So, just prepend the ungotten character onto the current
# chunk:
self.chunk = char + self.chunk
self.chunkSize += 1
else:
self.chunkOffset -= 1
assert self.chunk[self.chunkOffset] == char
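# Character-stream sketch: charsUntil consumes up to (not including) a stop
# character; char() then yields the stop character itself.
#   s = HTMLUnicodeInputStream("hello>world")
#   s.charsUntil(frozenset((">",)))   # -> "hello"
#   s.char()                          # -> ">"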
class HTMLBinaryInputStream(HTMLUnicodeInputStream):
"""Provides a unicode stream of characters to the HTMLTokenizer.
This class takes care of character encoding and removing or replacing
incorrect byte-sequences and also provides column and line tracking.
"""
def __init__(self, source, override_encoding=None, transport_encoding=None,
same_origin_parent_encoding=None, likely_encoding=None,
default_encoding="windows-1252", useChardet=True):
"""Initialises the HTMLInputStream.
HTMLInputStream(source, [encoding]) -> Normalized stream from source
for use by html5lib.
source can be either a file-object, local filename or a string.
The optional encoding parameter must be a string that indicates
the encoding. If specified, that encoding will be used,
regardless of any BOM or later declaration (such as in a meta
element)
"""
# Raw Stream - for unicode objects this will encode to utf-8 and set
# self.charEncoding as appropriate
self.rawStream = self.openStream(source)
HTMLUnicodeInputStream.__init__(self, self.rawStream)
# Encoding Information
# Number of bytes to use when looking for a meta element with
# encoding information
self.numBytesMeta = 1024
# Number of bytes to use when using detecting encoding using chardet
self.numBytesChardet = 100
# Things from args
self.override_encoding = override_encoding
self.transport_encoding = transport_encoding
self.same_origin_parent_encoding = same_origin_parent_encoding
self.likely_encoding = likely_encoding
self.default_encoding = default_encoding
# Determine encoding
self.charEncoding = self.determineEncoding(useChardet)
assert self.charEncoding[0] is not None
# Call superclass
self.reset()
def reset(self):
self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace')
HTMLUnicodeInputStream.reset(self)
def openStream(self, source):
"""Produces a file object from source.
source can be either a file object, local filename or a string.
"""
# Already a file object
if hasattr(source, 'read'):
stream = source
else:
stream = BytesIO(source)
try:
stream.seek(stream.tell())
except: # pylint:disable=bare-except
stream = BufferedStream(stream)
return stream
def determineEncoding(self, chardet=True):
# BOMs take precedence over everything
# This will also read past the BOM if present
charEncoding = self.detectBOM(), "certain"
if charEncoding[0] is not None:
return charEncoding
        # If we've been overridden, we've been overridden
charEncoding = lookupEncoding(self.override_encoding), "certain"
if charEncoding[0] is not None:
return charEncoding
# Now check the transport layer
charEncoding = lookupEncoding(self.transport_encoding), "certain"
if charEncoding[0] is not None:
return charEncoding
# Look for meta elements with encoding information
charEncoding = self.detectEncodingMeta(), "tentative"
if charEncoding[0] is not None:
return charEncoding
# Parent document encoding
charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative"
if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"):
return charEncoding
# "likely" encoding
charEncoding = lookupEncoding(self.likely_encoding), "tentative"
if charEncoding[0] is not None:
return charEncoding
# Guess with chardet, if available
if chardet:
try:
from chardet.universaldetector import UniversalDetector
except ImportError:
pass
else:
buffers = []
detector = UniversalDetector()
while not detector.done:
buffer = self.rawStream.read(self.numBytesChardet)
assert isinstance(buffer, bytes)
if not buffer:
break
buffers.append(buffer)
detector.feed(buffer)
detector.close()
encoding = lookupEncoding(detector.result['encoding'])
self.rawStream.seek(0)
if encoding is not None:
return encoding, "tentative"
# Try the default encoding
charEncoding = lookupEncoding(self.default_encoding), "tentative"
if charEncoding[0] is not None:
return charEncoding
# Fallback to html5lib's default if even that hasn't worked
return lookupEncoding("windows-1252"), "tentative"
def changeEncoding(self, newEncoding):
assert self.charEncoding[1] != "certain"
newEncoding = lookupEncoding(newEncoding)
if newEncoding is None:
return
if newEncoding.name in ("utf-16be", "utf-16le"):
newEncoding = lookupEncoding("utf-8")
assert newEncoding is not None
elif newEncoding == self.charEncoding[0]:
self.charEncoding = (self.charEncoding[0], "certain")
else:
self.rawStream.seek(0)
self.charEncoding = (newEncoding, "certain")
self.reset()
raise _ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding))
def detectBOM(self):
"""Attempts to detect at BOM at the start of the stream. If
an encoding can be determined from the BOM return the name of the
encoding otherwise return None"""
bomDict = {
codecs.BOM_UTF8: 'utf-8',
codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be',
codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be'
}
# Go to beginning of file and read in 4 bytes
string = self.rawStream.read(4)
assert isinstance(string, bytes)
# Try detecting the BOM using bytes from the string
encoding = bomDict.get(string[:3]) # UTF-8
seek = 3
if not encoding:
# Need to detect UTF-32 before UTF-16
encoding = bomDict.get(string) # UTF-32
seek = 4
if not encoding:
encoding = bomDict.get(string[:2]) # UTF-16
seek = 2
# Set the read position past the BOM if one was found, otherwise
# set it to the start of the stream
if encoding:
self.rawStream.seek(seek)
return lookupEncoding(encoding)
else:
self.rawStream.seek(0)
return None
def detectEncodingMeta(self):
"""Report the encoding declared by the meta element
"""
buffer = self.rawStream.read(self.numBytesMeta)
assert isinstance(buffer, bytes)
parser = EncodingParser(buffer)
self.rawStream.seek(0)
encoding = parser.getEncoding()
if encoding is not None and encoding.name in ("utf-16be", "utf-16le"):
encoding = lookupEncoding("utf-8")
return encoding
class EncodingBytes(bytes):
"""String-like object with an associated position and various extra methods
If the position is ever greater than the string length then an exception is
raised"""
    def __new__(cls, value):
        assert isinstance(value, bytes)
        return bytes.__new__(cls, value.lower())
def __init__(self, value):
# pylint:disable=unused-argument
self._position = -1
def __iter__(self):
return self
def __next__(self):
p = self._position = self._position + 1
if p >= len(self):
raise StopIteration
elif p < 0:
raise TypeError
return self[p:p + 1]
def next(self):
# Py2 compat
return self.__next__()
def previous(self):
p = self._position
if p >= len(self):
raise StopIteration
elif p < 0:
raise TypeError
self._position = p = p - 1
return self[p:p + 1]
def setPosition(self, position):
if self._position >= len(self):
raise StopIteration
self._position = position
def getPosition(self):
if self._position >= len(self):
raise StopIteration
if self._position >= 0:
return self._position
else:
return None
position = property(getPosition, setPosition)
def getCurrentByte(self):
return self[self.position:self.position + 1]
currentByte = property(getCurrentByte)
def skip(self, chars=spaceCharactersBytes):
"""Skip past a list of characters"""
p = self.position # use property for the error-checking
while p < len(self):
c = self[p:p + 1]
if c not in chars:
self._position = p
return c
p += 1
self._position = p
return None
def skipUntil(self, chars):
p = self.position
while p < len(self):
c = self[p:p + 1]
if c in chars:
self._position = p
return c
p += 1
self._position = p
return None
def matchBytes(self, bytes):
"""Look for a sequence of bytes at the start of a string. If the bytes
are found return True and advance the position to the byte after the
match. Otherwise return False and leave the position alone"""
p = self.position
data = self[p:p + len(bytes)]
rv = data.startswith(bytes)
if rv:
self.position += len(bytes)
return rv
def jumpTo(self, bytes):
"""Look for the next sequence of bytes matching a given sequence. If
a match is found advance the position to the last byte of the match"""
newPosition = self[self.position:].find(bytes)
if newPosition > -1:
# XXX: This is ugly, but I can't see a nicer way to fix this.
if self._position == -1:
self._position = 0
self._position += (newPosition + len(bytes) - 1)
return True
else:
raise StopIteration
class EncodingParser(object):
"""Mini parser for detecting character encoding from meta elements"""
def __init__(self, data):
"""string - the data to work on for encoding detection"""
self.data = EncodingBytes(data)
self.encoding = None
def getEncoding(self):
methodDispatch = (
(b"<!--", self.handleComment),
(b"<meta", self.handleMeta),
(b"</", self.handlePossibleEndTag),
(b"<!", self.handleOther),
(b"<?", self.handleOther),
(b"<", self.handlePossibleStartTag))
for _ in self.data:
keepParsing = True
for key, method in methodDispatch:
if self.data.matchBytes(key):
try:
keepParsing = method()
break
except StopIteration:
keepParsing = False
break
if not keepParsing:
break
return self.encoding
def handleComment(self):
"""Skip over comments"""
return self.data.jumpTo(b"-->")
def handleMeta(self):
if self.data.currentByte not in spaceCharactersBytes:
            # <meta is not followed by a space character, so just keep going
return True
# We have a valid meta element we want to search for attributes
hasPragma = False
pendingEncoding = None
while True:
# Try to find the next attribute after the current position
attr = self.getAttribute()
if attr is None:
return True
else:
if attr[0] == b"http-equiv":
hasPragma = attr[1] == b"content-type"
if hasPragma and pendingEncoding is not None:
self.encoding = pendingEncoding
return False
elif attr[0] == b"charset":
tentativeEncoding = attr[1]
codec = lookupEncoding(tentativeEncoding)
if codec is not None:
self.encoding = codec
return False
elif attr[0] == b"content":
contentParser = ContentAttrParser(EncodingBytes(attr[1]))
tentativeEncoding = contentParser.parse()
if tentativeEncoding is not None:
codec = lookupEncoding(tentativeEncoding)
if codec is not None:
if hasPragma:
self.encoding = codec
return False
else:
pendingEncoding = codec
def handlePossibleStartTag(self):
return self.handlePossibleTag(False)
def handlePossibleEndTag(self):
next(self.data)
return self.handlePossibleTag(True)
def handlePossibleTag(self, endTag):
data = self.data
if data.currentByte not in asciiLettersBytes:
# If the next byte is not an ascii letter either ignore this
# fragment (possible start tag case) or treat it according to
# handleOther
if endTag:
data.previous()
self.handleOther()
return True
c = data.skipUntil(spacesAngleBrackets)
if c == b"<":
# return to the first step in the overall "two step" algorithm
# reprocessing the < byte
data.previous()
else:
# Read all attributes
attr = self.getAttribute()
while attr is not None:
attr = self.getAttribute()
return True
def handleOther(self):
return self.data.jumpTo(b">")
def getAttribute(self):
"""Return a name,value pair for the next attribute in the stream,
if one is found, or None"""
data = self.data
# Step 1 (skip chars)
c = data.skip(spaceCharactersBytes | frozenset([b"/"]))
assert c is None or len(c) == 1
# Step 2
if c in (b">", None):
return None
# Step 3
attrName = []
attrValue = []
# Step 4 attribute name
while True:
if c == b"=" and attrName:
break
elif c in spaceCharactersBytes:
# Step 6!
c = data.skip()
break
elif c in (b"/", b">"):
return b"".join(attrName), b""
elif c in asciiUppercaseBytes:
attrName.append(c.lower())
elif c is None:
return None
else:
attrName.append(c)
# Step 5
c = next(data)
# Step 7
if c != b"=":
data.previous()
return b"".join(attrName), b""
# Step 8
next(data)
# Step 9
c = data.skip()
# Step 10
if c in (b"'", b'"'):
# 10.1
quoteChar = c
while True:
# 10.2
c = next(data)
# 10.3
if c == quoteChar:
next(data)
return b"".join(attrName), b"".join(attrValue)
# 10.4
elif c in asciiUppercaseBytes:
attrValue.append(c.lower())
# 10.5
else:
attrValue.append(c)
elif c == b">":
return b"".join(attrName), b""
elif c in asciiUppercaseBytes:
attrValue.append(c.lower())
elif c is None:
return None
else:
attrValue.append(c)
# Step 11
while True:
c = next(data)
if c in spacesAngleBrackets:
return b"".join(attrName), b"".join(attrValue)
elif c in asciiUppercaseBytes:
attrValue.append(c.lower())
elif c is None:
return None
else:
attrValue.append(c)
class ContentAttrParser(object):
def __init__(self, data):
assert isinstance(data, bytes)
self.data = data
def parse(self):
try:
# Check if the attr name is charset
# otherwise return
self.data.jumpTo(b"charset")
self.data.position += 1
self.data.skip()
            if self.data.currentByte != b"=":
# If there is no = sign keep looking for attrs
return None
self.data.position += 1
self.data.skip()
# Look for an encoding between matching quote marks
if self.data.currentByte in (b'"', b"'"):
quoteMark = self.data.currentByte
self.data.position += 1
oldPosition = self.data.position
if self.data.jumpTo(quoteMark):
return self.data[oldPosition:self.data.position]
else:
return None
else:
# Unquoted value
oldPosition = self.data.position
try:
self.data.skipUntil(spaceCharactersBytes)
return self.data[oldPosition:self.data.position]
except StopIteration:
# Return the whole remaining value
return self.data[oldPosition:]
except StopIteration:
return None
def lookupEncoding(encoding):
"""Return the python codec name corresponding to an encoding or None if the
string doesn't correspond to a valid encoding."""
if isinstance(encoding, binary_type):
try:
encoding = encoding.decode("ascii")
except UnicodeDecodeError:
return None
if encoding is not None:
try:
return webencodings.lookup(encoding)
except AttributeError:
return None
else:
return None
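# Minimal sniffing sketch: the meta pre-parser resolves the declared charset
# through webencodings (iso-8859-1 is an alias of windows-1252), reported as
# a "tentative" encoding. Illustrative only; the package-relative imports
# above mean this module is not meant to be run directly.
if __name__ == '__main__':
    stream = HTMLInputStream(b'<meta charset="iso-8859-1"><p>caf\xe9</p>')
    print(stream.charEncoding)  # (<Encoding windows-1252>, 'tentative')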
|
AlexanderFabisch/scikit-learn | refs/heads/master | sklearn/utils/graph.py | 289 | """
Graph utilities and algorithms
Graphs are represented with their adjacency matrices, preferably using
sparse matrices.
"""
# Authors: Aric Hagberg <[email protected]>
# Gael Varoquaux <[email protected]>
# Jake Vanderplas <[email protected]>
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from .validation import check_array
from .graph_shortest_path import graph_shortest_path
###############################################################################
# Path and connected component analysis.
# Code adapted from networkx
def single_source_shortest_path_length(graph, source, cutoff=None):
"""Return the shortest path length from source to all reachable nodes.
Returns a dictionary of shortest path lengths keyed by target.
Parameters
----------
    graph : sparse matrix or 2D array (preferably LIL matrix)
Adjacency matrix of the graph
source : node label
Starting node for path
cutoff : integer, optional
Depth to stop the search - only
paths of length <= cutoff are returned.
Examples
--------
>>> from sklearn.utils.graph import single_source_shortest_path_length
>>> import numpy as np
>>> graph = np.array([[ 0, 1, 0, 0],
... [ 1, 0, 1, 0],
... [ 0, 1, 0, 1],
... [ 0, 0, 1, 0]])
>>> single_source_shortest_path_length(graph, 0)
{0: 0, 1: 1, 2: 2, 3: 3}
>>> single_source_shortest_path_length(np.ones((6, 6)), 2)
{0: 1, 1: 1, 2: 0, 3: 1, 4: 1, 5: 1}
"""
if sparse.isspmatrix(graph):
graph = graph.tolil()
else:
graph = sparse.lil_matrix(graph)
seen = {} # level (number of hops) when seen in BFS
level = 0 # the current level
    next_level = [source]  # list of nodes to check at next level
while next_level:
this_level = next_level # advance to next level
next_level = set() # and start a new list (fringe)
for v in this_level:
if v not in seen:
seen[v] = level # set the level of vertex v
next_level.update(graph.rows[v])
if cutoff is not None and cutoff <= level:
break
level += 1
return seen # return all path lengths as dictionary
if hasattr(sparse, 'connected_components'):
connected_components = sparse.connected_components
else:
from .sparsetools import connected_components
###############################################################################
# Graph laplacian
def graph_laplacian(csgraph, normed=False, return_diag=False):
""" Return the Laplacian matrix of a directed graph.
For non-symmetric graphs the out-degree is used in the computation.
Parameters
----------
csgraph : array_like or sparse matrix, 2 dimensions
compressed-sparse graph, with shape (N, N).
normed : bool, optional
If True, then compute normalized Laplacian.
return_diag : bool, optional
If True, then return diagonal as well as laplacian.
Returns
-------
lap : ndarray
The N x N laplacian matrix of graph.
diag : ndarray
The length-N diagonal of the laplacian matrix.
diag is returned only if return_diag is True.
Notes
-----
The Laplacian matrix of a graph is sometimes referred to as the
"Kirchoff matrix" or the "admittance matrix", and is useful in many
parts of spectral graph theory. In particular, the eigen-decomposition
of the laplacian matrix can give insight into many properties of the graph.
For non-symmetric directed graphs, the laplacian is computed using the
out-degree of each node.
"""
if csgraph.ndim != 2 or csgraph.shape[0] != csgraph.shape[1]:
raise ValueError('csgraph must be a square matrix or array')
if normed and (np.issubdtype(csgraph.dtype, np.int)
or np.issubdtype(csgraph.dtype, np.uint)):
csgraph = check_array(csgraph, dtype=np.float64, accept_sparse=True)
if sparse.isspmatrix(csgraph):
return _laplacian_sparse(csgraph, normed=normed,
return_diag=return_diag)
else:
return _laplacian_dense(csgraph, normed=normed,
return_diag=return_diag)
def _laplacian_sparse(graph, normed=False, return_diag=False):
n_nodes = graph.shape[0]
    if graph.format != 'coo':
lap = (-graph).tocoo()
else:
lap = -graph.copy()
diag_mask = (lap.row == lap.col)
    if diag_mask.sum() != n_nodes:
# The sparsity pattern of the matrix has holes on the diagonal,
# we need to fix that
diag_idx = lap.row[diag_mask]
diagonal_holes = list(set(range(n_nodes)).difference(diag_idx))
new_data = np.concatenate([lap.data, np.ones(len(diagonal_holes))])
new_row = np.concatenate([lap.row, diagonal_holes])
new_col = np.concatenate([lap.col, diagonal_holes])
lap = sparse.coo_matrix((new_data, (new_row, new_col)),
shape=lap.shape)
diag_mask = (lap.row == lap.col)
lap.data[diag_mask] = 0
w = -np.asarray(lap.sum(axis=1)).squeeze()
if normed:
w = np.sqrt(w)
w_zeros = (w == 0)
w[w_zeros] = 1
lap.data /= w[lap.row]
lap.data /= w[lap.col]
lap.data[diag_mask] = (1 - w_zeros[lap.row[diag_mask]]).astype(
lap.data.dtype)
else:
lap.data[diag_mask] = w[lap.row[diag_mask]]
if return_diag:
return lap, w
return lap
def _laplacian_dense(graph, normed=False, return_diag=False):
n_nodes = graph.shape[0]
lap = -np.asarray(graph) # minus sign leads to a copy
# set diagonal to zero
lap.flat[::n_nodes + 1] = 0
w = -lap.sum(axis=0)
if normed:
w = np.sqrt(w)
w_zeros = (w == 0)
w[w_zeros] = 1
lap /= w
lap /= w[:, np.newaxis]
lap.flat[::n_nodes + 1] = (1 - w_zeros).astype(lap.dtype)
else:
lap.flat[::n_nodes + 1] = w.astype(lap.dtype)
if return_diag:
return lap, w
return lap
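# Hedged demo of graph_laplacian on a dense array; the expected values follow
# from the definition L = D - A (this snippet is not part of the test suite).
if __name__ == '__main__':
    A = np.array([[0., 1., 0.], [1., 0., 1.], [0., 1., 0.]])
    lap, deg = graph_laplacian(A, return_diag=True)
    assert np.allclose(lap.sum(axis=1), 0)  # rows of D - A sum to zero
    print(deg)  # node degrees: [ 1.  2.  1.]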
|
thaumos/ansible | refs/heads/devel | lib/ansible/plugins/cliconf/enos.py | 16 | # (C) 2017 Red Hat Inc.
# Copyright (C) 2017 Lenovo.
#
# GNU General Public License v3.0+
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Contains CLIConf Plugin methods for ENOS Modules
# Lenovo Networking
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
---
cliconf: enos
short_description: Use enos cliconf to run commands on Lenovo ENOS platform
description:
- This enos plugin provides low level abstraction apis for
sending and receiving CLI commands from Lenovo ENOS network devices.
version_added: "2.5"
"""
import re
import json
from itertools import chain
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.network.common.utils import to_list
from ansible.plugins.cliconf import CliconfBase, enable_mode
class Cliconf(CliconfBase):
def get_device_info(self):
device_info = {}
device_info['network_os'] = 'enos'
reply = self.get('show version')
data = to_text(reply, errors='surrogate_or_strict').strip()
match = re.search(r'^Software Version (.*?) ', data, re.M | re.I)
if match:
device_info['network_os_version'] = match.group(1)
match = re.search(r'^Lenovo RackSwitch (\S+)', data, re.M | re.I)
if match:
device_info['network_os_model'] = match.group(1)
match = re.search(r'^(.+) uptime', data, re.M)
if match:
device_info['network_os_hostname'] = match.group(1)
else:
device_info['network_os_hostname'] = "NA"
return device_info
@enable_mode
def get_config(self, source='running', format='text', flags=None):
if source not in ('running', 'startup'):
msg = "fetching configuration from %s is not supported"
return self.invalid_params(msg % source)
if source == 'running':
cmd = 'show running-config'
else:
cmd = 'show startup-config'
return self.send_command(cmd)
@enable_mode
def edit_config(self, command):
for cmd in chain(['configure terminal'], to_list(command), ['end']):
self.send_command(cmd)
def get(self, command, prompt=None, answer=None, sendonly=False, check_all=False):
return self.send_command(command=command, prompt=prompt, answer=answer, sendonly=sendonly, check_all=check_all)
def get_capabilities(self):
result = super(Cliconf, self).get_capabilities()
return json.dumps(result)
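# Regex sketch for get_device_info above, against an illustrative banner
# (sample text, not captured from a real device):
#   data = "Software Version 8.4.11 (FLASH image2)\nLenovo RackSwitch G8272\nG8272 uptime is 1 day"
#   re.search(r'^Software Version (.*?) ', data, re.M | re.I).group(1)  # '8.4.11'
#   re.search(r'^Lenovo RackSwitch (\S+)', data, re.M | re.I).group(1)  # 'G8272'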
|
IllusionRom-deprecated/android_platform_tools_idea | refs/heads/master | python/lib/Lib/encodings/hex_codec.py | 528 | """ Python 'hex_codec' Codec - 2-digit hex content transfer encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Written by Marc-Andre Lemburg ([email protected]).
"""
import codecs, binascii
### Codec APIs
def hex_encode(input, errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = binascii.b2a_hex(input)
return (output, len(input))
def hex_decode(input, errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = binascii.a2b_hex(input)
return (output, len(input))
class Codec(codecs.Codec):
    def encode(self, input, errors='strict'):
        return hex_encode(input, errors)
    def decode(self, input, errors='strict'):
        return hex_decode(input, errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
assert self.errors == 'strict'
return binascii.b2a_hex(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
assert self.errors == 'strict'
return binascii.a2b_hex(input)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='hex',
encode=hex_encode,
decode=hex_decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
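# Round-trip sketch calling the helpers above directly (no codec
# registration is needed for direct calls):
if __name__ == '__main__':
    encoded, consumed = hex_encode('hello')
    assert (encoded, consumed) == ('68656c6c6f', 5)
    decoded, _ = hex_decode(encoded)
    assert decoded == 'hello'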
|
zengboming/python | refs/heads/master | udp1.py | 1 | #udp1.py
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)  # SOCK_DGRAM selects UDP
s.bind(('127.0.0.1', 9999))
print 'Bind UDP on 9999...'
while True:
    # recvfrom returns the datagram payload and the sender's (host, port)
    data, addr = s.recvfrom(1024)
    print 'Received from %s:%s' % addr
    s.sendto('Hello, %s!' % data, addr)
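# A matching client sketch (run from another shell; the message below is
# illustrative):
#   import socket
#   c = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   c.sendto('Alice', ('127.0.0.1', 9999))
#   print c.recv(1024)  # -> 'Hello, Alice!'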
|
LumPenPacK/NetworkExtractionFromImages | refs/heads/master | win_build/nefi2_win_amd64_msvc_2015/site-packages/networkx/algorithms/centrality/harmonic.py | 10 | """
Harmonic centrality measure.
"""
# Copyright (C) 2015 by
# Alessandro Luongo
# BSD license.
from __future__ import division
import functools
import networkx as nx
__author__ = "\n".join(['Alessandro Luongo ([email protected]'])
__all__ = ['harmonic_centrality']
def harmonic_centrality(G, distance=None):
r"""Compute harmonic centrality for nodes.
Harmonic centrality [1]_ of a node `u` is the sum of the reciprocal
of the shortest path distances from all other nodes to `u`
.. math::
C(u) = \sum_{v \neq u} \frac{1}{d(v, u)}
where `d(v, u)` is the shortest-path distance between `v` and `u`.
Notice that higher values indicate higher centrality.
Parameters
----------
G : graph
A NetworkX graph
distance : edge attribute key, optional (default=None)
Use the specified edge attribute as the edge distance in shortest
path calculations. If `None`, then each edge will have distance equal to 1.
Returns
-------
nodes : dictionary
Dictionary of nodes with harmonic centrality as the value.
See Also
--------
betweenness_centrality, load_centrality, eigenvector_centrality,
degree_centrality, closeness_centrality
Notes
-----
If the 'distance' keyword is set to an edge attribute key then the
shortest-path length will be computed using Dijkstra's algorithm with
that edge attribute as the edge weight.
References
----------
.. [1] Boldi, Paolo, and Sebastiano Vigna. "Axioms for centrality." Internet Mathematics 10.3-4 (2014): 222-262.
"""
if distance is not None:
# use Dijkstra's algorithm with specified attribute as edge weight
path_length = functools.partial(nx.all_pairs_dijkstra_path_length,
weight=distance)
else:
path_length = nx.all_pairs_shortest_path_length
nodes = G.nodes()
harmonic_centrality = {}
if len(G) <= 1:
for singleton in nodes:
harmonic_centrality[singleton] = 0.0
return harmonic_centrality
sp = path_length(G.reverse() if G.is_directed() else G)
for n in nodes:
harmonic_centrality[n] = sum([1/i if i > 0 else 0 for i in sp[n].values()])
return harmonic_centrality
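# Hedged usage sketch: a 4-node path graph; node 0 scores
# 1/1 + 1/2 + 1/3 ~= 1.83 per the formula above.
if __name__ == '__main__':
    G = nx.path_graph(4)
    print(harmonic_centrality(G))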
|
javacruft/pylxd | refs/heads/master | pylxd/connection.py | 1 | # Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import os
import socket
import ssl
from pylxd import exceptions
from pylxd import utils
from six.moves import http_client
class UnixHTTPConnection(http_client.HTTPConnection):
def __init__(self, path, host='localhost', port=None, strict=None,
timeout=None):
http_client.HTTPConnection.__init__(self, host, port=port,
strict=strict,
timeout=timeout)
self.path = path
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(self.path)
self.sock = sock
class HTTPSConnection(http_client.HTTPConnection):
default_port = 8443
def __init__(self, *args, **kwargs):
http_client.HTTPConnection.__init__(self, *args, **kwargs)
def connect(self):
sock = socket.create_connection((self.host, self.port),
self.timeout, self.source_address)
if self._tunnel_host:
self.sock = sock
self._tunnel()
(cert_file, key_file) = self._get_ssl_certs()
self.sock = ssl.wrap_socket(sock, certfile=cert_file,
keyfile=key_file,
ssl_version=ssl.PROTOCOL_TLSv1_2)
@staticmethod
def _get_ssl_certs():
return (os.path.join(os.environ['HOME'], '.config/lxc/client.crt'),
os.path.join(os.environ['HOME'], '.config/lxc/client.key'))
class LXDConnection(object):
def __init__(self, host=None, port=8443):
if host:
self.host = host
self.port = port
self.unix_socket = None
else:
if 'LXD_DIR' in os.environ:
self.unix_socket = os.path.join(os.environ['LXD_DIR'],
'unix.socket')
else:
self.unix_socket = '/var/lib/lxd/unix.socket'
self.host, self.port = None, None
self.connection = None
def get_connection(self):
if self.host:
return HTTPSConnection(self.host, self.port)
return UnixHTTPConnection(self.unix_socket)
def get_object(self, *args, **kwargs):
self.connection = self.get_connection()
self.connection.request(*args, **kwargs)
response = self.connection.getresponse()
state = response.status
data = json.loads(response.read())
if not data:
msg = "Null Data"
raise exceptions.PyLXDException(msg)
elif state == 200 or (state == 202 and data.get('status_code') == 100):
return state, data
else:
utils.get_lxd_error(state, data)
def get_status(self, *args, **kwargs):
status = False
self.connection = self.get_connection()
self.connection.request(*args, **kwargs)
response = self.connection.getresponse()
state = response.status
data = json.loads(response.read())
if not data:
msg = "Null Data"
raise exceptions.PyLXDException(msg)
elif data.get('error'):
utils.get_lxd_error(state, data)
elif state == 200 or (state == 202 and data.get('status_code') == 100):
status = True
return status
def get_raw(self, *args, **kwargs):
self.connection = self.get_connection()
self.connection.request(*args, **kwargs)
response = self.connection.getresponse()
body = response.read()
if not body:
msg = "Null Body"
raise exceptions.PyLXDException(msg)
elif response.status == 200:
return body
else:
msg = "Failed to get raw response"
raise exceptions.PyLXDException(msg)
def get_ws(self, *args, **kwargs):
self.connection = self.get_connection()
self.connection.request(*args, **kwargs)
response = self.connection.getresponse()
return response.status
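# --- Hedged usage sketch (not part of upstream pylxd) ----------------------
# Assumes a local LXD daemon reachable over the default unix socket; '/1.0'
# is the LXD REST API root.  Illustration only.
if __name__ == '__main__':
    conn = LXDConnection()
    state, data = conn.get_object('GET', '/1.0')
    print('status: %s' % state)
    print('api metadata: %s' % data.get('metadata'))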
|
spring01/libPSI | refs/heads/master | tests/testgrendel/grendel_tests/util_tests/test_metaclasses.py | 1 | import unittest
import sys
import os
# Add the directory containing the 'grendel_tests' package to sys.path
#sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(__file__)), os.pardir, os.pardir, os.pardir, "lib", "python"))
#sys.path.insert(0, "/Users/dhollman/Projects/PyGrendel")
from grendel.util.metaclasses import Immutable, mutator
class Foo(object):
__metaclass__ = Immutable
def __init__(self, a, b, c):
self.a, self.b, self.c = a, b, c
self.change_a(a)
self.abc = a, b, c
def __str__(self):
return "Foo" + str((self.a, self.b, self.c))
__repr__ = __str__
@mutator
def change_a(self, new_val):
self.a = new_val
@property
def abc(self):
return self.a, self.b, self.c
@abc.setter
def abc(self, val):
self.a, self.b, self.c = val
class ImmutableMetaclassTest(unittest.TestCase):
def test_construct_1(self):
t = Foo(1, 'a', (123, "abc"))
self.assertEqual(str(t), "Foo(1, 'a', (123, 'abc'))")
self.assertEqual(t.b, 'a')
self.assertEqual(t.a, 1)
self.assertEqual(t.c, (123, "abc"))
def test_construct_2(self):
t = Foo(1, Foo(1, 2, 3), (123, "abc", Foo('a', 2, 4)))
self.assertEqual(str(t), "Foo(1, Foo(1, 2, 3), (123, 'abc', Foo('a', 2, 4)))")
def test_construct_3(self):
with self.assertRaises(AttributeError):
Foo(["hello", "world"], 'a', (123, "abc"))
def test_no_set_1(self):
t = Foo(1, 'a', (123, "abc"))
with self.assertRaisesRegexp(TypeError, "Can't change attribute 'a' of object of immutable type 'Foo'"):
t.a = 6
def test_no_set_2(self):
t = Foo(1, 'a', (123, "abc"))
with self.assertRaisesRegexp(TypeError, "Can't change attribute 'a' of object of immutable type 'Foo'"):
setattr(t, 'a', 6)
def test_no_set_3(self):
t = Foo(1, 'a', (123, "abc"))
with self.assertRaisesRegexp(TypeError, "Can't change attribute 'a' of object of immutable type 'Foo'"):
t.change_a(6)
def test_property(self):
t = Foo(1, 'a', (123, "abc"))
self.assertEqual(t.abc, (1, 'a', (123, "abc")))
def test_no_set_property(self):
t = Foo(1, 'a', (123, "abc"))
with self.assertRaisesRegexp(TypeError, "Can't change attribute 'abc' of object of immutable type 'Foo'"):
t.abc = 1, 2, 3
def test_subclass_1(self):
with self.assertRaises(TypeError):
class Bar(Foo):
def __setattr__(self, key, value):
pass
def test_subclass_2(self):
with self.assertRaises(TypeError):
class Bar(Foo):
def __delattr__(self, item):
pass
def test_subclass_3(self):
with self.assertRaises(TypeError):
class FooBar(object):
pass
class Bar(FooBar):
__metaclass__ = Immutable
pass
def test_subclass_4(self):
class Bar(Foo):
__metaclass__ = Immutable
def __init__(self, a, b, c, d):
self.d = d
self.change_a(a)
super(Bar, self).__init__(a, b, c)
bar = Bar(1, 2, 3, 4)
self.assertEqual(str(bar), "Foo(1, 2, 3)")
|
gisce/OCB | refs/heads/7.0 | addons/account/wizard/account_report_general_journal.py | 56 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_general_journal(osv.osv_memory):
_inherit = "account.common.journal.report"
_name = 'account.general.journal'
_description = 'Account General Journal'
_columns = {
'journal_ids': fields.many2many('account.journal', 'account_general_journal_journal_rel', 'account_id', 'journal_id', 'Journals', required=True),
}
def _print_report(self, cr, uid, ids, data, context=None):
data = self.pre_print_report(cr, uid, ids, data, context=context)
return {'type': 'ir.actions.report.xml', 'report_name': 'account.general.journal', 'datas': data}
account_general_journal()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Chromium97/lnmiithackathon | refs/heads/master | website/wsgi.py | 3 | """
WSGI config for website project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "website.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
beezee/GAE-Django-site | refs/heads/master | django/conf/locale/de/formats.py | 329 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j. F Y H:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i:s'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
Donkyhotay/MoonPy | refs/heads/master | zope/testing/testrunner-ex/sample1/sample11/__init__.py | 9480 | #
|
broferek/ansible | refs/heads/devel | test/units/modules/network/nso/test_nso_verify.py | 40 | #
# Copyright (c) 2017 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
import json
from units.compat.mock import patch
from ansible.modules.network.nso import nso_verify
from . import nso_module
from .nso_module import MockResponse
from units.modules.utils import set_module_args
class TestNsoVerify(nso_module.TestNsoModule):
module = nso_verify
@patch('ansible.module_utils.network.nso.nso.open_url')
def test_nso_verify_empty_data(self, open_url_mock):
calls = [
MockResponse('login', {}, 200, '{}', {'set-cookie': 'id'}),
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.4.3"}'),
MockResponse('logout', {}, 200, '{"result": {}}'),
]
open_url_mock.side_effect = lambda *args, **kwargs: nso_module.mock_call(calls, *args, **kwargs)
data = {}
set_module_args({
'username': 'user', 'password': 'password',
'url': 'http://localhost:8080/jsonrpc', 'data': data
})
self.execute_module(changed=False)
self.assertEqual(0, len(calls))
@patch('ansible.module_utils.network.nso.nso.open_url')
def test_nso_verify_violation(self, open_url_mock):
devices_schema = nso_module.load_fixture('devices_schema.json')
device_schema = nso_module.load_fixture('device_schema.json')
description_schema = nso_module.load_fixture('description_schema.json')
calls = [
MockResponse('login', {}, 200, '{}', {'set-cookie': 'id'}),
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5.0"}'),
MockResponse('get_module_prefix_map', {}, 200, '{"result": {"tailf-ncs": "ncs"}}'),
MockResponse('new_trans', {'mode': 'read'}, 200, '{"result": {"th": 1}}'),
MockResponse('get_schema', {'path': '/ncs:devices'}, 200, '{"result": %s}' % (json.dumps(devices_schema, ))),
MockResponse('get_schema', {'path': '/ncs:devices/device'}, 200, '{"result": %s}' % (json.dumps(device_schema, ))),
MockResponse('exists', {'path': '/ncs:devices/device{ce0}'}, 200, '{"result": {"exists": true}}'),
MockResponse('get_value', {'path': '/ncs:devices/device{ce0}/description'}, 200, '{"result": {"value": "In Violation"}}'),
MockResponse('get_schema', {'path': '/ncs:devices/device/description'}, 200, '{"result": %s}' % (json.dumps(description_schema, ))),
MockResponse('logout', {}, 200, '{"result": {}}'),
]
open_url_mock.side_effect = lambda *args, **kwargs: nso_module.mock_call(calls, *args, **kwargs)
data = nso_module.load_fixture('verify_violation_data.json')
set_module_args({
'username': 'user', 'password': 'password',
'url': 'http://localhost:8080/jsonrpc', 'data': data
})
self.execute_module(failed=True, violations=[
{'path': '/ncs:devices/device{ce0}/description', 'expected-value': 'Example Device', 'value': 'In Violation'},
])
self.assertEqual(0, len(calls))
@patch('ansible.module_utils.network.nso.nso.open_url')
def test_nso_verify_ok(self, open_url_mock):
devices_schema = nso_module.load_fixture('devices_schema.json')
device_schema = nso_module.load_fixture('device_schema.json')
calls = [
MockResponse('login', {}, 200, '{}', {'set-cookie': 'id'}),
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5.0"}'),
MockResponse('get_module_prefix_map', {}, 200, '{"result": {"tailf-ncs": "ncs"}}'),
MockResponse('new_trans', {'mode': 'read'}, 200, '{"result": {"th": 1}}'),
MockResponse('get_schema', {'path': '/ncs:devices'}, 200, '{"result": %s}' % (json.dumps(devices_schema, ))),
MockResponse('get_schema', {'path': '/ncs:devices/device'}, 200, '{"result": %s}' % (json.dumps(device_schema, ))),
MockResponse('exists', {'path': '/ncs:devices/device{ce0}'}, 200, '{"result": {"exists": true}}'),
MockResponse('get_value', {'path': '/ncs:devices/device{ce0}/description'}, 200, '{"result": {"value": "Example Device"}}'),
MockResponse('logout', {}, 200, '{"result": {}}'),
]
open_url_mock.side_effect = lambda *args, **kwargs: nso_module.mock_call(calls, *args, **kwargs)
data = nso_module.load_fixture('verify_violation_data.json')
set_module_args({
'username': 'user', 'password': 'password',
'url': 'http://localhost:8080/jsonrpc', 'data': data,
'validate_certs': False
})
self.execute_module(changed=False)
self.assertEqual(0, len(calls))
|
ForestMars/ks | refs/heads/master | sites/all/libraries/fckeditor/editor/filemanager/connectors/py/upload.py | 45 | #!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload( FCKeditorConnectorBase,
UploadFileCommandMixin,
BaseHttpMixin, BaseHtmlMixin):
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
command = 'QuickUpload'
# The file type (from the QueryString, by default 'File').
resourceType = self.request.get('Type','File')
currentFolder = "/"
# Check for invalid paths
if currentFolder is None:
return self.sendUploadResults(102, '', '', "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
# Setup paths
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
                self.createServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorQuickUpload()
data = conn.doResponse()
for header in conn.headers:
if not header is None:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
|
itbabu/django-oscar | refs/heads/master | src/oscar/apps/analytics/reports.py | 24 | from django.utils.translation import ugettext_lazy as _
from oscar.core.loading import get_class, get_model
ReportGenerator = get_class('dashboard.reports.reports', 'ReportGenerator')
ReportCSVFormatter = get_class('dashboard.reports.reports',
'ReportCSVFormatter')
ReportHTMLFormatter = get_class('dashboard.reports.reports',
'ReportHTMLFormatter')
ProductRecord = get_model('analytics', 'ProductRecord')
UserRecord = get_model('analytics', 'UserRecord')
class ProductReportCSVFormatter(ReportCSVFormatter):
    filename_template = 'product-analytics.csv'
def generate_csv(self, response, products):
writer = self.get_csv_writer(response)
header_row = [_('Product'),
_('Views'),
_('Basket additions'),
_('Purchases')]
writer.writerow(header_row)
for record in products:
row = [record.product,
record.num_views,
record.num_basket_additions,
record.num_purchases]
writer.writerow(row)
class ProductReportHTMLFormatter(ReportHTMLFormatter):
filename_template = 'dashboard/reports/partials/product_report.html'
class ProductReportGenerator(ReportGenerator):
code = 'product_analytics'
description = _('Product analytics')
formatters = {
'CSV_formatter': ProductReportCSVFormatter,
'HTML_formatter': ProductReportHTMLFormatter}
def report_description(self):
return self.description
def generate(self):
records = ProductRecord._default_manager.all()
return self.formatter.generate_response(records)
def is_available_to(self, user):
return user.is_staff
class UserReportCSVFormatter(ReportCSVFormatter):
filename_template = 'user-analytics.csv'
def generate_csv(self, response, users):
writer = self.get_csv_writer(response)
header_row = [_('Name'),
_('Date registered'),
_('Product views'),
_('Basket additions'),
_('Orders'),
_('Order lines'),
_('Order items'),
_('Total spent'),
_('Date of last order')]
writer.writerow(header_row)
for record in users:
row = [record.user.get_full_name(),
self.format_date(record.user.date_joined),
record.num_product_views,
record.num_basket_additions,
record.num_orders,
record.num_order_lines,
record.num_order_items,
record.total_spent,
self.format_datetime(record.date_last_order)]
writer.writerow(row)
class UserReportHTMLFormatter(ReportHTMLFormatter):
filename_template = 'dashboard/reports/partials/user_report.html'
class UserReportGenerator(ReportGenerator):
code = 'user_analytics'
description = _('User analytics')
formatters = {
'CSV_formatter': UserReportCSVFormatter,
'HTML_formatter': UserReportHTMLFormatter}
def generate(self):
users = UserRecord._default_manager.select_related().all()
return self.formatter.generate_response(users)
def is_available_to(self, user):
return user.is_staff
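# --- Hedged usage sketch (not part of django-oscar) ------------------------
# How a dashboard view might drive one of these generators.  The
# formatter='CSV' constructor argument is an assumption based on the
# ``formatters`` dicts above; the exact kwargs accepted by the base
# ReportGenerator may differ between Oscar versions.
from django.core.exceptions import PermissionDenied
def download_product_report(request):
    generator = ProductReportGenerator(formatter='CSV')
    if not generator.is_available_to(request.user):
        raise PermissionDenied
    return generator.generate()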
|
duducosmos/pgs4a | refs/heads/master | python-install/lib/python2.7/ctypes/test/test_cast.py | 82 | from ctypes import *
import unittest
import sys
class Test(unittest.TestCase):
def test_array2pointer(self):
array = (c_int * 3)(42, 17, 2)
# casting an array to a pointer works.
ptr = cast(array, POINTER(c_int))
self.assertEqual([ptr[i] for i in range(3)], [42, 17, 2])
if 2*sizeof(c_short) == sizeof(c_int):
ptr = cast(array, POINTER(c_short))
if sys.byteorder == "little":
self.assertEqual([ptr[i] for i in range(6)],
[42, 0, 17, 0, 2, 0])
else:
self.assertEqual([ptr[i] for i in range(6)],
[0, 42, 0, 17, 0, 2])
def test_address2pointer(self):
array = (c_int * 3)(42, 17, 2)
address = addressof(array)
ptr = cast(c_void_p(address), POINTER(c_int))
self.assertEqual([ptr[i] for i in range(3)], [42, 17, 2])
ptr = cast(address, POINTER(c_int))
self.assertEqual([ptr[i] for i in range(3)], [42, 17, 2])
def test_p2a_objects(self):
array = (c_char_p * 5)()
self.assertEqual(array._objects, None)
array[0] = "foo bar"
self.assertEqual(array._objects, {'0': "foo bar"})
p = cast(array, POINTER(c_char_p))
# array and p share a common _objects attribute
self.assertTrue(p._objects is array._objects)
self.assertEqual(array._objects, {'0': "foo bar", id(array): array})
p[0] = "spam spam"
self.assertEqual(p._objects, {'0': "spam spam", id(array): array})
self.assertTrue(array._objects is p._objects)
p[1] = "foo bar"
self.assertEqual(p._objects, {'1': 'foo bar', '0': "spam spam", id(array): array})
self.assertTrue(array._objects is p._objects)
def test_other(self):
p = cast((c_int * 4)(1, 2, 3, 4), POINTER(c_int))
self.assertEqual(p[:4], [1,2, 3, 4])
self.assertEqual(p[:4:], [1, 2, 3, 4])
self.assertEqual(p[3:-1:-1], [4, 3, 2, 1])
self.assertEqual(p[:4:3], [1, 4])
c_int()
self.assertEqual(p[:4], [1, 2, 3, 4])
self.assertEqual(p[:4:], [1, 2, 3, 4])
self.assertEqual(p[3:-1:-1], [4, 3, 2, 1])
self.assertEqual(p[:4:3], [1, 4])
p[2] = 96
self.assertEqual(p[:4], [1, 2, 96, 4])
self.assertEqual(p[:4:], [1, 2, 96, 4])
self.assertEqual(p[3:-1:-1], [4, 96, 2, 1])
self.assertEqual(p[:4:3], [1, 4])
c_int()
self.assertEqual(p[:4], [1, 2, 96, 4])
self.assertEqual(p[:4:], [1, 2, 96, 4])
self.assertEqual(p[3:-1:-1], [4, 96, 2, 1])
self.assertEqual(p[:4:3], [1, 4])
def test_char_p(self):
# This didn't work: bad argument to internal function
s = c_char_p("hiho")
self.assertEqual(cast(cast(s, c_void_p), c_char_p).value,
"hiho")
try:
c_wchar_p
except NameError:
pass
else:
def test_wchar_p(self):
s = c_wchar_p("hiho")
self.assertEqual(cast(cast(s, c_void_p), c_wchar_p).value,
"hiho")
if __name__ == "__main__":
unittest.main()
|
2014c2g6/c2g6 | refs/heads/master | exts/w2/static/Brython2.0.0-20140209-164925/Lib/xml/dom/minicompat.py | 781 | """Python version compatibility support for minidom."""
# This module should only be imported using "import *".
#
# The following names are defined:
#
# NodeList -- lightest possible NodeList implementation
#
# EmptyNodeList -- lightest possible NodeList that is guaranteed to
# remain empty (immutable)
#
# StringTypes -- tuple of defined string types
#
# defproperty -- function used in conjunction with GetattrMagic;
# using these together is needed to make them work
# as efficiently as possible in both Python 2.2+
# and older versions. For example:
#
# class MyClass(GetattrMagic):
# def _get_myattr(self):
# return something
#
# defproperty(MyClass, "myattr",
# "return some value")
#
# For Python 2.2 and newer, this will construct a
# property object on the class, which avoids
# needing to override __getattr__(). It will only
# work for read-only attributes.
#
# For older versions of Python, inheriting from
# GetattrMagic will use the traditional
# __getattr__() hackery to achieve the same effect,
# but less efficiently.
#
# defproperty() should be used for each version of
# the relevant _get_<property>() function.
__all__ = ["NodeList", "EmptyNodeList", "StringTypes", "defproperty"]
import xml.dom
StringTypes = (str,)
class NodeList(list):
__slots__ = ()
def item(self, index):
if 0 <= index < len(self):
return self[index]
def _get_length(self):
return len(self)
def _set_length(self, value):
raise xml.dom.NoModificationAllowedErr(
"attempt to modify read-only attribute 'length'")
length = property(_get_length, _set_length,
doc="The number of nodes in the NodeList.")
def __getstate__(self):
return list(self)
def __setstate__(self, state):
self[:] = state
class EmptyNodeList(tuple):
__slots__ = ()
def __add__(self, other):
NL = NodeList()
NL.extend(other)
return NL
def __radd__(self, other):
NL = NodeList()
NL.extend(other)
return NL
def item(self, index):
return None
def _get_length(self):
return 0
def _set_length(self, value):
raise xml.dom.NoModificationAllowedErr(
"attempt to modify read-only attribute 'length'")
length = property(_get_length, _set_length,
doc="The number of nodes in the NodeList.")
def defproperty(klass, name, doc):
get = getattr(klass, ("_get_" + name))
def set(self, value, name=name):
raise xml.dom.NoModificationAllowedErr(
"attempt to modify read-only attribute " + repr(name))
assert not hasattr(klass, "_set_" + name), \
"expected not to find _set_" + name
prop = property(get, set, doc=doc)
setattr(klass, name, prop)
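# --- Hedged illustration (not part of the stdlib module) -------------------
# defproperty() expects the class to define _get_<name>() and installs a
# matching read-only property; assignment then raises
# xml.dom.NoModificationAllowedErr.
class _Example(object):
    def _get_size(self):
        return 42
defproperty(_Example, "size", doc="Example read-only attribute.")
# _Example().size == 42; ``_Example().size = 1`` raises an exception.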
|
darktears/chromium-crosswalk | refs/heads/master | third_party/cython/src/Cython/Plex/Transitions.py | 99 | #
# Plex - Transition Maps
#
# This version represents state sets directly as dicts
# for speed.
#
from sys import maxint as maxint
class TransitionMap(object):
"""
A TransitionMap maps an input event to a set of states.
An input event is one of: a range of character codes,
the empty string (representing an epsilon move), or one
of the special symbols BOL, EOL, EOF.
For characters, this implementation compactly represents
the map by means of a list:
[code_0, states_0, code_1, states_1, code_2, states_2,
..., code_n-1, states_n-1, code_n]
where |code_i| is a character code, and |states_i| is a
set of states corresponding to characters with codes |c|
in the range |code_i| <= |c| <= |code_i+1|.
The following invariants hold:
n >= 1
code_0 == -maxint
code_n == maxint
code_i < code_i+1 for i in 0..n-1
states_0 == states_n-1
Mappings for the special events '', BOL, EOL, EOF are
kept separately in a dictionary.
"""
map = None # The list of codes and states
special = None # Mapping for special events
def __init__(self, map = None, special = None):
if not map:
map = [-maxint, {}, maxint]
if not special:
special = {}
self.map = map
self.special = special
#self.check() ###
def add(self, event, new_state,
TupleType = tuple):
"""
Add transition to |new_state| on |event|.
"""
if type(event) is TupleType:
code0, code1 = event
i = self.split(code0)
j = self.split(code1)
map = self.map
while i < j:
map[i + 1][new_state] = 1
i = i + 2
else:
self.get_special(event)[new_state] = 1
def add_set(self, event, new_set,
TupleType = tuple):
"""
Add transitions to the states in |new_set| on |event|.
"""
if type(event) is TupleType:
code0, code1 = event
i = self.split(code0)
j = self.split(code1)
map = self.map
while i < j:
map[i + 1].update(new_set)
i = i + 2
else:
self.get_special(event).update(new_set)
def get_epsilon(self,
none = None):
"""
Return the mapping for epsilon, or None.
"""
return self.special.get('', none)
def iteritems(self,
len = len):
"""
Return the mapping as an iterable of ((code1, code2), state_set) and
(special_event, state_set) pairs.
"""
result = []
map = self.map
else_set = map[1]
i = 0
n = len(map) - 1
code0 = map[0]
while i < n:
set = map[i + 1]
code1 = map[i + 2]
if set or else_set:
result.append(((code0, code1), set))
code0 = code1
i = i + 2
for event, set in self.special.iteritems():
if set:
result.append((event, set))
return iter(result)
items = iteritems
# ------------------- Private methods --------------------
def split(self, code,
len = len, maxint = maxint):
"""
Search the list for the position of the split point for |code|,
inserting a new split point if necessary. Returns index |i| such
that |code| == |map[i]|.
"""
# We use a funky variation on binary search.
map = self.map
hi = len(map) - 1
# Special case: code == map[-1]
if code == maxint:
return hi
# General case
lo = 0
# loop invariant: map[lo] <= code < map[hi] and hi - lo >= 2
while hi - lo >= 4:
# Find midpoint truncated to even index
mid = ((lo + hi) // 2) & ~1
if code < map[mid]:
hi = mid
else:
lo = mid
# map[lo] <= code < map[hi] and hi - lo == 2
if map[lo] == code:
return lo
else:
map[hi:hi] = [code, map[hi - 1].copy()]
#self.check() ###
return hi
def get_special(self, event):
"""
Get state set for special event, adding a new entry if necessary.
"""
special = self.special
set = special.get(event, None)
if not set:
set = {}
special[event] = set
return set
# --------------------- Conversion methods -----------------------
def __str__(self):
map_strs = []
map = self.map
n = len(map)
i = 0
while i < n:
code = map[i]
if code == -maxint:
code_str = "-inf"
elif code == maxint:
code_str = "inf"
else:
code_str = str(code)
map_strs.append(code_str)
i = i + 1
if i < n:
map_strs.append(state_set_str(map[i]))
i = i + 1
special_strs = {}
for event, set in self.special.iteritems():
special_strs[event] = state_set_str(set)
return "[%s]+%s" % (
','.join(map_strs),
special_strs
)
# --------------------- Debugging methods -----------------------
def check(self):
"""Check data structure integrity."""
if not self.map[-3] < self.map[-1]:
print(self)
assert 0
def dump(self, file):
map = self.map
i = 0
n = len(map) - 1
while i < n:
self.dump_range(map[i], map[i + 2], map[i + 1], file)
i = i + 2
for event, set in self.special.iteritems():
if set:
if not event:
event = 'empty'
self.dump_trans(event, set, file)
def dump_range(self, code0, code1, set, file):
if set:
if code0 == -maxint:
if code1 == maxint:
k = "any"
else:
k = "< %s" % self.dump_char(code1)
elif code1 == maxint:
k = "> %s" % self.dump_char(code0 - 1)
elif code0 == code1 - 1:
k = self.dump_char(code0)
else:
k = "%s..%s" % (self.dump_char(code0),
self.dump_char(code1 - 1))
self.dump_trans(k, set, file)
def dump_char(self, code):
if 0 <= code <= 255:
return repr(chr(code))
else:
return "chr(%d)" % code
def dump_trans(self, key, set, file):
file.write(" %s --> %s\n" % (key, self.dump_set(set)))
def dump_set(self, set):
return state_set_str(set)
#
# State set manipulation functions
#
#def merge_state_sets(set1, set2):
# for state in set2.keys():
# set1[state] = 1
def state_set_str(set):
return "[%s]" % ','.join(["S%d" % state.number for state in set])
|
tedsunnyday/SE-Server | refs/heads/master | server/lib/requests/packages/chardet/eucjpprober.py | 2918 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCJPDistributionAnalysis
from .jpcntx import EUCJPContextAnalysis
from .mbcssm import EUCJPSMModel
class EUCJPProber(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(EUCJPSMModel)
self._mDistributionAnalyzer = EUCJPDistributionAnalysis()
self._mContextAnalyzer = EUCJPContextAnalysis()
self.reset()
def reset(self):
MultiByteCharSetProber.reset(self)
self._mContextAnalyzer.reset()
def get_charset_name(self):
return "EUC-JP"
def feed(self, aBuf):
aLen = len(aBuf)
for i in range(0, aLen):
# PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte
codingState = self._mCodingSM.next_state(aBuf[i])
if codingState == constants.eError:
if constants._debug:
sys.stderr.write(self.get_charset_name()
+ ' prober hit error at byte ' + str(i)
+ '\n')
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
charLen = self._mCodingSM.get_current_charlen()
if i == 0:
self._mLastChar[1] = aBuf[0]
self._mContextAnalyzer.feed(self._mLastChar, charLen)
self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
else:
self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen)
self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
charLen)
self._mLastChar[0] = aBuf[aLen - 1]
if self.get_state() == constants.eDetecting:
if (self._mContextAnalyzer.got_enough_data() and
(self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
contxtCf = self._mContextAnalyzer.get_confidence()
distribCf = self._mDistributionAnalyzer.get_confidence()
return max(contxtCf, distribCf)
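# --- Hedged usage sketch (not part of chardet) ------------------------------
# chardet's UniversalDetector normally drives this prober; the relative
# imports above mean this module cannot run as a script, so treat this as
# illustration only.  'sample_euc_jp.txt' is a hypothetical input file.
def _demo():
    prober = EUCJPProber()
    with open('sample_euc_jp.txt', 'rb') as fh:
        prober.feed(bytearray(fh.read()))
    return prober.get_charset_name(), prober.get_confidence()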
|
defivelo/db | refs/heads/defivelo-215 | apps/user/migrations/0012_userprofile_birthdate.py | 1 | from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('user', '0011_auto_20151009_1532'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='birthdate',
field=models.DateField(verbose_name='Date', null=True, blank=True),
),
]
|
makacodewalker/etsgh | refs/heads/master | djangoappengine/__init__.py | 12133432 | |
pasqualguerrero/django | refs/heads/master | tests/forms_tests/tests/__init__.py | 12133432 | |
ncrmro/ango | refs/heads/master | server/polls/__init__.py | 12133432 | |
Arlefreak/HorchataClub | refs/heads/master | api/migrations/0002_auto_20151018_0550.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
|
tudorian/eden | refs/heads/master | modules/tests/roles/load_data.py | 28 | from tests.roles.create_role_test_data import create_role_test_data
# Define Organisations
orgs = ["Org-A",
"Org-B",
"Org-C",
"Org-D",
"Org-E",
]
branches = [None,
"Branch-A",
"Branch-B",
"Branch-C",
"Branch-D",
"Branch-E",
]
create_role_test_data(orgs, branches)
|