repo_name (stringlengths 5-100) | path (stringlengths 4-299) | copies (stringclasses 990 values) | size (stringlengths 4-7) | content (stringlengths 666-1.03M) | license (stringclasses 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64 3.17-100) | line_max (int64 7-1k) | alpha_frac (float64 0.25-0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
PercyLau/oonSIM | NFD/.waf-tools/websocket.py | 18 | 3039 | # encoding: utf-8
from waflib import Options, Logs, Errors
from waflib.Configure import conf
import re
def addWebsocketOptions(self, opt):
opt.add_option('--without-websocket', action='store_false', default=True,
dest='with_websocket',
help='Disable WebSocket face support')
setattr(Options.OptionsContext, "addWebsocketOptions", addWebsocketOptions)
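# Illustrative wiring from a consuming wscript (hypothetical; the function and
# option names below follow standard waf conventions and are assumptions, not
# part of this file):
#
# def options(opt):
#     opt.addWebsocketOptions(opt)
#
# def configure(conf):
#     conf.checkWebsocket(mandatory=False)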
@conf
def checkWebsocket(self, **kw):
if not self.options.with_websocket:
return
isMandatory = kw.get('mandatory', True)
self.start_msg('Checking for WebSocket includes')
try:
websocketDir = self.path.find_dir('websocketpp/websocketpp')
if not websocketDir:
raise Errors.WafError('Not found')
versionFile = websocketDir.find_node('version.hpp')
if not versionFile:
raise Errors.WafError('Corrupted: WebSocket version file not found')
try:
txt = versionFile.read()
except (OSError, IOError):
raise Errors.WafError('Corrupted: cannot read WebSocket version file')
# Looking for the following:
# static int const major_version = 0;
# static int const minor_version = 5;
# static int const patch_version = 1;
version = [None, None, None]
majorVersion = re.compile('^static int const major_version = (\\d+);$', re.M)
version[0] = majorVersion.search(txt)
minorVersion = re.compile('^static int const minor_version = (\\d+);$', re.M)
version[1] = minorVersion.search(txt)
patchVersion = re.compile('^static int const patch_version = (\\d+);$', re.M)
version[2] = patchVersion.search(txt)
if not version[0] or not version[1] or not version[2]:
raise Errors.WafError('Corrupted: cannot detect websocket version')
self.env['WEBSOCKET_VERSION'] = [i.group(1) for i in version]
# todo: version checking, if necessary
self.end_msg('.'.join(self.env['WEBSOCKET_VERSION']))
self.env['INCLUDES_WEBSOCKET'] = websocketDir.parent.abspath()
self.env['HAVE_WEBSOCKET'] = True
self.define('HAVE_WEBSOCKET', 1)
self.define('_WEBSOCKETPP_CPP11_STL_', 1)
except Errors.WafError as error:
if isMandatory:
self.end_msg(str(error), color='RED')
Logs.warn('If you are using git NFD repository, checkout websocketpp submodule: ')
Logs.warn(' git submodule init && git submodule update')
Logs.warn('Otherwise, manually download and extract websocketpp library:')
Logs.warn(' mkdir websocketpp')
Logs.warn(' curl -L https://github.com/zaphoyd/websocketpp/archive/0.5.1.tar.gz > websocket.tar.gz')
Logs.warn(' tar zxf websocket.tar.gz -C websocketpp/ --strip 1')
Logs.warn('Alternatively, WebSocket support can be disabled with --without-websocket')
self.fatal("The configuration failed")
else:
self.end_msg(str(error))
| gpl-3.0 | -5,691,626,072,744,170,000 | 37.468354 | 115 | 0.630141 | false |
joshpelkey/cmap-parse | cmap_parse.py | 1 | 11702 | ##
#
# cmap_parse.py
# An attempt to parse concept maps, exported from cmap tools...take one
#
# Copyright 2016 Josh Pelkey
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing permissions and limitations under the
# License.
#
##
import glob
import re
import os
import itertools
import networkx as nx
def CxlConversion (file):
# get the concepts, linking phrases, and connections
concepts = {}
linking_phrases = {}
connections = []
concepts_linked = []
for line in file:
if "concept id=" in line:
concept = re.findall (r'"([^"]*)"', line)
concepts[concept[0]] = concept[1]
# get the linking phrases
if "linking-phrase id=" in line:
linking_phrase = re.findall (r'"([^"]*)"', line)
linking_phrases[linking_phrase[0]] = linking_phrase[1]
# get the connections
if "connection id=" in line:
connections.append (re.findall (r'"([^"]*)"', line))
# cycle through the linking phrase list, find all lines matching from-id and to-id
# edges are combinations of from-id and to-id
for key in linking_phrases:
from_links = []
to_links = []
for connection in connections:
# if linking phrase is in the from-id (linking phrase at beginning)
if key == connection[1]:
from_links.append ([linking_phrases[key],concepts[connection[2]]])
# if linking phrase is in the to-id (linking phrase at the end)
if key == connection[2]:
to_links.append ([concepts[connection[1]], linking_phrases[key]])
#print to_links
#print from_links
#print "--"
# now combine the lists, to_links to from_links
for to_link in to_links:
for from_link in from_links:
concepts_linked.append ([to_link[0], to_link[1], from_link[1]])
return concepts_linked
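# Each entry of the returned list is a [concept, linking phrase, concept] triple,
# e.g. for a hypothetical map: [['water', 'is a', 'resource'], ['resource', 'supports', 'life']]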
def CmapParse (cmap_files, result, filenames, root_concept, export_concepts):
# store all concepts to print later
all_concepts = []
# open the result file to write output
rfile = open(result, 'w')
rfile.write('Filename\t Num Concepts\t Num Hierarchies\t Highest Hierarchy\t Num Crosslinks\t\n\n')
# iterate over all the files and start doing stuffs
for index, cmap_file in enumerate(cmap_files):
# create an empty Multi-directed graph
G = nx.MultiDiGraph ()
# open a cmap text file and begin writing results
global f
f = open (cmap_file)
rfile.write(filenames[index] + '\t')
# if file extension cxl, do this fun conversion
textFormatCorrect = True
if os.path.splitext(filenames[index])[1][1:] == "cxl":
concepts_linked = CxlConversion(f)
for edge in concepts_linked:
G.add_edge (edge[0].lower(), edge[2].lower(), link=edge[1].lower())
else:
# split the lines in to a list
lines = ((f.read ()).splitlines ())
# iterate over the list and split each line
# into individual lists, delimited by tab
for line in lines:
edge = line.split ('\t')
# break if not 3 items per line
if len(edge) != 3:
rfile.write('>> Text file not formatted correctly.\n')
textFormatCorrect = False
break
G.add_edge (edge[0].lower(), edge[2].lower(), link=edge[1].lower())
# if the file had a line without 3 items, break completely
if not textFormatCorrect:
continue
# if 'sustainability' isn't a concept, fail
if root_concept.lower() not in G:
rfile.write('>> ' + root_concept.lower() + ' not a concept in the map.\n')
continue
# store first-level hierarchy concepts
hierarchy_list = G.successors (root_concept.lower())
# iterate through the main graph and set hierarchy to zero for now
for x in G:
G.node[x]['hier'] = 0
# iterate through the top hierarchy in the main graph and set these first-level hierarchy
# concepts to an incrementing integer
hierIter = 1
for x in hierarchy_list:
G.node[x]['hier'] = hierIter
hierIter += 1
# number of concepts is the number of nodes
# minus the root node
num_concepts = G.order () - 1
# hierarchy is the out degree of the root node
# we assume the root is 'sustainability'
hierarchy = G.out_degree (root_concept.lower())
# look at all paths from sustainability to all
# other nodes. no repeated nodes (cycles)
paths_list = []
for n in G.nodes ():
for path in nx.all_simple_paths (G, source=root_concept.lower(), target=n):
paths_list.append (path)
# highest hierarchy defined here as the max path length
# this is a bit different than how it's done manually
# discuss later
highest_hier = max (len (x) for x in paths_list) - 1
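# Worked example (hypothetical map): if the longest simple path found above is
# sustainability -> energy -> solar, then len(path) == 3 and highest_hier == 2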
# let's make subgraphs of all hierarchies
# we can use these subgraphs to do some
# operations and check out cross links
subgraph_list = []
for x in hierarchy_list:
subgraph = nx.MultiDiGraph ()
connected_nodes = []
for y in G.nodes ():
if nx.has_path (G, x, y):
connected_nodes.append (y)
subgraph = G.subgraph(connected_nodes).copy ()
subgraph.graph['name'] = x
subgraph_list.append (subgraph)
# for node not in first-level hierarchy, check which
# of the first-level concepts is closest (shortest path)
# and then label it with that hierarchy
fail = False
for n in G.nodes ():
shortest_path = 0
assoc_hier = ''
if n not in hierarchy_list and n != root_concept.lower ():
path_list = []
for y in hierarchy_list:
if nx.has_path (G, y, n):
path_list = nx.shortest_path (G, y, n)
if shortest_path == 0:
assoc_hier = y
shortest_path = len (path_list)
else:
if (len (path_list) < shortest_path):
assoc_hier = y
shortest_path = len (path_list)
if assoc_hier:
G.node[n]['hier'] = G.node[assoc_hier]['hier']
#print G.node[n]['hier']
else:
fail = True
rfile.write('>> One or more concepts not connected to first-level hierarchy. \n')
break
# if exporting concepts, store the concepts
if export_concepts:
all_concepts.append(G.nodes())
# a concept was not connected to a first-level hierarchy
# move on to the next concept map
if fail:
continue
# now i need to find all edges that have
# two hier node attributes that don't match.
# these are crosslinks
total_crosslinks = 0
for x in G.edges():
if ((G.node[x[0]]['hier']) != 0) and ((G.node[x[1]]['hier']) != 0):
if G.node[x[0]]['hier'] != G.node[x[1]]['hier']:
#print (str (x[0]) + ' ---- ' + str (x[1]) + ' hier: ' + str (G.node[x[0]]['hier']) + ' ---- ' + str (G.node[x[1]]['hier']))
total_crosslinks += 1
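# Example (hypothetical labels): an edge between a node with hier == 1 and a node
# with hier == 3 counts as one crosslink; edges within one hierarchy, or touching
# an unlabeled (hier == 0) node, are not counted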
# print out the stuffs
rfile.write(str (num_concepts) + '\t')
rfile.write(str (hierarchy) + '\t')
rfile.write(str (highest_hier) + '\t')
rfile.write(str (total_crosslinks) + '\t')
# make it pretty
rfile.write('\n')
# show me cycles
#print ('>> Cycles: ' + str (nx.simple_cycles (G)))
# close up the cmap file
f.close()
# if exporting concepts, print them out
rfile.write('\n')
if export_concepts:
rfile.write('Filename\t')
for filename in filenames:
rfile.write(filename + '\t')
rfile.write('\n')
rfile.write('Concepts')
# transpose to columns and write
transposed_all_concepts = map(lambda *row: list(row), *all_concepts)
for concepts in transposed_all_concepts:
rfile.write('\t')
for concept in concepts:
if concept:
# stripping these 
 characters, which some cxl files seem to have for some reason
rfile.write(concept.replace('
', ' ') + '\t')
else:
rfile.write('\t')
rfile.write('\n')
# close the result file
rfile.close()
# eof.zomg
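# Hypothetical invocation (paths and root concept are illustrative; argument
# order matches the CmapParse signature above):
# files = glob.glob('maps/*.cxl')
# CmapParse(files, 'results.tsv', [os.path.basename(p) for p in files], 'sustainability', True)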
| apache-2.0 | -3,076,254,453,924,928,000 | 43.664122 | 164 | 0.443941 | false |
loop1024/pymo-global | android/pgs4a-0.9.6/python-install/lib/python2.7/zipfile.py | 4 | 54030 | """
Read and write ZIP files.
"""
import struct, os, time, sys, shutil
import binascii, cStringIO, stat
import io
import re
try:
import zlib # We may need its compression method
crc32 = zlib.crc32
except ImportError:
zlib = None
crc32 = binascii.crc32
__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
"ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ]
class BadZipfile(Exception):
pass
class LargeZipFile(Exception):
"""
Raised when writing a zipfile, the zipfile requires ZIP64 extensions
and those extensions are disabled.
"""
error = BadZipfile # The exception raised by this module
ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = 1 << 16
ZIP_MAX_COMMENT = (1 << 16) - 1
# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
# Other ZIP compression methods not supported
# Below are some formats and associated data for reading/writing headers using
# the struct module. The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
# http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)
# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = "<4s4H2LH"
stringEndArchive = "PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9
# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = "PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)
# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18
# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = "PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11
# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = "PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)
# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = "PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)
_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9
def _check_zipfile(fp):
try:
if _EndRecData(fp):
return True # file has correct magic number
except IOError:
pass
return False
def is_zipfile(filename):
"""Quickly see if a file is a ZIP file by checking the magic number.
The filename argument may be a file or file-like object too.
"""
result = False
try:
if hasattr(filename, "read"):
result = _check_zipfile(fp=filename)
else:
with open(filename, "rb") as fp:
result = _check_zipfile(fp)
except IOError:
pass
return result
def _EndRecData64(fpin, offset, endrec):
"""
Read the ZIP64 end-of-archive records and use that to update endrec
"""
try:
fpin.seek(offset - sizeEndCentDir64Locator, 2)
except IOError:
# If the seek fails, the file is not large enough to contain a ZIP64
# end-of-archive record, so just return the end record we were given.
return endrec
data = fpin.read(sizeEndCentDir64Locator)
sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
if sig != stringEndArchive64Locator:
return endrec
if diskno != 0 or disks != 1:
raise BadZipfile("zipfiles that span multiple disks are not supported")
# Assume no 'zip64 extensible data'
fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
data = fpin.read(sizeEndCentDir64)
sig, sz, create_version, read_version, disk_num, disk_dir, \
dircount, dircount2, dirsize, diroffset = \
struct.unpack(structEndArchive64, data)
if sig != stringEndArchive64:
return endrec
# Update the original endrec using data from the ZIP64 record
endrec[_ECD_SIGNATURE] = sig
endrec[_ECD_DISK_NUMBER] = disk_num
endrec[_ECD_DISK_START] = disk_dir
endrec[_ECD_ENTRIES_THIS_DISK] = dircount
endrec[_ECD_ENTRIES_TOTAL] = dircount2
endrec[_ECD_SIZE] = dirsize
endrec[_ECD_OFFSET] = diroffset
return endrec
def _EndRecData(fpin):
"""Return data from the "End of Central Directory" record, or None.
The data is a list of the nine items in the ZIP "End of central dir"
record followed by a tenth item, the file seek offset of this record."""
# Determine file size
fpin.seek(0, 2)
filesize = fpin.tell()
# Check to see if this is ZIP file with no archive comment (the
# "end of central directory" structure should be the last item in the
# file if this is the case).
try:
fpin.seek(-sizeEndCentDir, 2)
except IOError:
return None
data = fpin.read()
if data[0:4] == stringEndArchive and data[-2:] == "\000\000":
# the signature is correct and there's no comment, unpack structure
endrec = struct.unpack(structEndArchive, data)
endrec=list(endrec)
# Append a blank comment and record start offset
endrec.append("")
endrec.append(filesize - sizeEndCentDir)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, -sizeEndCentDir, endrec)
# Either this is not a ZIP file, or it is a ZIP file with an archive
# comment. Search the end of the file for the "end of central directory"
# record signature. The comment is the last item in the ZIP file and may be
# up to 64K long. It is assumed that the "end of central directory" magic
# number does not appear in the comment.
maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
fpin.seek(maxCommentStart, 0)
data = fpin.read()
start = data.rfind(stringEndArchive)
if start >= 0:
# found the magic number; attempt to unpack and interpret
recData = data[start:start+sizeEndCentDir]
endrec = list(struct.unpack(structEndArchive, recData))
comment = data[start+sizeEndCentDir:]
# check that comment length is correct
if endrec[_ECD_COMMENT_SIZE] == len(comment):
# Append the archive comment and start offset
endrec.append(comment)
endrec.append(maxCommentStart + start)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, maxCommentStart + start - filesize,
endrec)
# Unable to find a valid end of central directory structure
return
class ZipInfo (object):
"""Class with attributes describing each file in the ZIP archive."""
__slots__ = (
'orig_filename',
'filename',
'date_time',
'compress_type',
'comment',
'extra',
'create_system',
'create_version',
'extract_version',
'reserved',
'flag_bits',
'volume',
'internal_attr',
'external_attr',
'header_offset',
'CRC',
'compress_size',
'file_size',
'_raw_time',
)
def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
self.orig_filename = filename # Original file name in archive
# Terminate the file name at the first null byte. Null bytes in file
# names are used as tricks by viruses in archives.
null_byte = filename.find(chr(0))
if null_byte >= 0:
filename = filename[0:null_byte]
# This is used to ensure paths in generated ZIP files always use
# forward slashes as the directory separator, as required by the
# ZIP format specification.
if os.sep != "/" and os.sep in filename:
filename = filename.replace(os.sep, "/")
self.filename = filename # Normalized file name
self.date_time = date_time # year, month, day, hour, min, sec
# Standard values:
self.compress_type = ZIP_STORED # Type of compression for the file
self.comment = "" # Comment for each file
self.extra = "" # ZIP extra data
if sys.platform == 'win32':
self.create_system = 0 # System which created ZIP archive
else:
# Assume everything else is unix-y
self.create_system = 3 # System which created ZIP archive
self.create_version = 20 # Version which created ZIP archive
self.extract_version = 20 # Version needed to extract archive
self.reserved = 0 # Must be zero
self.flag_bits = 0 # ZIP flag bits
self.volume = 0 # Volume number of file header
self.internal_attr = 0 # Internal attributes
self.external_attr = 0 # External file attributes
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
# compress_size Size of the compressed file
# file_size Size of the uncompressed file
def FileHeader(self):
"""Return the per-file header as a string."""
dt = self.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
if self.flag_bits & 0x08:
# Set these to zero because we write them after the file data
CRC = compress_size = file_size = 0
else:
CRC = self.CRC
compress_size = self.compress_size
file_size = self.file_size
extra = self.extra
if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
# File is larger than what fits into a 4 byte integer,
# fall back to the ZIP64 extension
fmt = '<HHQQ'
extra = extra + struct.pack(fmt,
1, struct.calcsize(fmt)-4, file_size, compress_size)
file_size = 0xffffffff
compress_size = 0xffffffff
self.extract_version = max(45, self.extract_version)
self.create_version = max(45, self.extract_version)
filename, flag_bits = self._encodeFilenameFlags()
header = struct.pack(structFileHeader, stringFileHeader,
self.extract_version, self.reserved, flag_bits,
self.compress_type, dostime, dosdate, CRC,
compress_size, file_size,
len(filename), len(extra))
return header + filename + extra
def _encodeFilenameFlags(self):
if isinstance(self.filename, unicode):
try:
return self.filename.encode('ascii'), self.flag_bits
except UnicodeEncodeError:
return self.filename.encode('utf-8'), self.flag_bits | 0x800
else:
return self.filename, self.flag_bits
def _decodeFilename(self):
if self.flag_bits & 0x800:
return self.filename.decode('utf-8')
else:
return self.filename
def _decodeExtra(self):
# Try to decode the extra field.
extra = self.extra
unpack = struct.unpack
while len(extra) >= 4:
tp, ln = unpack('<HH', extra[:4])
if tp == 1:
if ln >= 24:
counts = unpack('<QQQ', extra[4:28])
elif ln == 16:
counts = unpack('<QQ', extra[4:20])
elif ln == 8:
counts = unpack('<Q', extra[4:12])
elif ln == 0:
counts = ()
else:
raise RuntimeError, "Corrupt extra field %s"%(ln,)
idx = 0
# ZIP64 extension (large files and/or large archives)
if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
self.file_size = counts[idx]
idx += 1
if self.compress_size == 0xFFFFFFFFL:
self.compress_size = counts[idx]
idx += 1
if self.header_offset == 0xffffffffL:
old = self.header_offset
self.header_offset = counts[idx]
idx+=1
extra = extra[ln+4:]
class _ZipDecrypter:
"""Class to handle decryption of files stored within a ZIP archive.
ZIP supports a password-based form of encryption. Even though known
plaintext attacks have been found against it, it is still useful
to be able to get data out of such a file.
Usage:
zd = _ZipDecrypter(mypwd)
plain_char = zd(cypher_char)
plain_text = map(zd, cypher_text)
"""
def _GenerateCRCTable():
"""Generate a CRC-32 table.
ZIP encryption uses the CRC32 one-byte primitive for scrambling some
internal keys. We noticed that a direct implementation is faster than
relying on binascii.crc32().
"""
poly = 0xedb88320
table = [0] * 256
for i in range(256):
crc = i
for j in range(8):
if crc & 1:
crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly
else:
crc = ((crc >> 1) & 0x7FFFFFFF)
table[i] = crc
return table
crctable = _GenerateCRCTable()
def _crc32(self, ch, crc):
"""Compute the CRC32 primitive on one byte."""
return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff]
def __init__(self, pwd):
self.key0 = 305419896
self.key1 = 591751049
self.key2 = 878082192
for p in pwd:
self._UpdateKeys(p)
def _UpdateKeys(self, c):
self.key0 = self._crc32(c, self.key0)
self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295
self.key1 = (self.key1 * 134775813 + 1) & 4294967295
self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2)
def __call__(self, c):
"""Decrypt a single character."""
c = ord(c)
k = self.key2 | 2
c = c ^ (((k * (k^1)) >> 8) & 255)
c = chr(c)
self._UpdateKeys(c)
return c
class ZipExtFile(io.BufferedIOBase):
"""File-like object for reading an archive member.
Is returned by ZipFile.open().
"""
# Max size supported by decompressor.
MAX_N = 1 << 31 - 1
# Read from compressed files in 4k blocks.
MIN_READ_SIZE = 4096
# Search for universal newlines or line chunks.
PATTERN = re.compile(r'^(?P<chunk>[^\r\n]+)|(?P<newline>\n|\r\n?)')
def __init__(self, fileobj, mode, zipinfo, decrypter=None):
self._fileobj = fileobj
self._decrypter = decrypter
self._compress_type = zipinfo.compress_type
self._compress_size = zipinfo.compress_size
self._compress_left = zipinfo.compress_size
if self._compress_type == ZIP_DEFLATED:
self._decompressor = zlib.decompressobj(-15)
self._unconsumed = ''
self._readbuffer = ''
self._offset = 0
self._universal = 'U' in mode
self.newlines = None
# Adjust read size for encrypted files since the first 12 bytes
# are for the encryption/password information.
if self._decrypter is not None:
self._compress_left -= 12
self.mode = mode
self.name = zipinfo.filename
if hasattr(zipinfo, 'CRC'):
self._expected_crc = zipinfo.CRC
self._running_crc = crc32(b'') & 0xffffffff
else:
self._expected_crc = None
def readline(self, limit=-1):
"""Read and return a line from the stream.
If limit is specified, at most limit bytes will be read.
"""
if not self._universal and limit < 0:
# Shortcut common case - newline found in buffer.
i = self._readbuffer.find('\n', self._offset) + 1
if i > 0:
line = self._readbuffer[self._offset: i]
self._offset = i
return line
if not self._universal:
return io.BufferedIOBase.readline(self, limit)
line = ''
while limit < 0 or len(line) < limit:
readahead = self.peek(2)
if readahead == '':
return line
#
# Search for universal newlines or line chunks.
#
# The pattern returns either a line chunk or a newline, but not
# both. Combined with peek(2), we are assured that the sequence
# '\r\n' is always retrieved completely and never split into
# separate newlines - '\r', '\n' due to coincidental readaheads.
#
match = self.PATTERN.search(readahead)
newline = match.group('newline')
if newline is not None:
if self.newlines is None:
self.newlines = []
if newline not in self.newlines:
self.newlines.append(newline)
self._offset += len(newline)
return line + '\n'
chunk = match.group('chunk')
if limit >= 0:
chunk = chunk[: limit - len(line)]
self._offset += len(chunk)
line += chunk
return line
def peek(self, n=1):
"""Returns buffered bytes without advancing the position."""
if n > len(self._readbuffer) - self._offset:
chunk = self.read(n)
self._offset -= len(chunk)
# Return up to 512 bytes to reduce allocation overhead for tight loops.
return self._readbuffer[self._offset: self._offset + 512]
def readable(self):
return True
def read(self, n=-1):
"""Read and return up to n bytes.
If the argument is omitted, None, or negative, data is read and returned until EOF is reached.
"""
buf = ''
if n is None:
n = -1
while True:
if n < 0:
data = self.read1(n)
elif n > len(buf):
data = self.read1(n - len(buf))
else:
return buf
if len(data) == 0:
return buf
buf += data
def _update_crc(self, newdata, eof):
# Update the CRC using the given data.
if self._expected_crc is None:
# No need to compute the CRC if we don't have a reference value
return
self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff
# Check the CRC if we're at the end of the file
if eof and self._running_crc != self._expected_crc:
raise BadZipfile("Bad CRC-32 for file %r" % self.name)
def read1(self, n):
"""Read up to n bytes with at most one read() system call."""
# Simplify algorithm (branching) by transforming negative n to large n.
if n < 0 or n is None:
n = self.MAX_N
# Bytes available in read buffer.
len_readbuffer = len(self._readbuffer) - self._offset
# Read from file.
if self._compress_left > 0 and n > len_readbuffer + len(self._unconsumed):
nbytes = n - len_readbuffer - len(self._unconsumed)
nbytes = max(nbytes, self.MIN_READ_SIZE)
nbytes = min(nbytes, self._compress_left)
data = self._fileobj.read(nbytes)
self._compress_left -= len(data)
if data and self._decrypter is not None:
data = ''.join(map(self._decrypter, data))
if self._compress_type == ZIP_STORED:
self._update_crc(data, eof=(self._compress_left==0))
self._readbuffer = self._readbuffer[self._offset:] + data
self._offset = 0
else:
# Prepare deflated bytes for decompression.
self._unconsumed += data
# Handle unconsumed data.
if (len(self._unconsumed) > 0 and n > len_readbuffer and
self._compress_type == ZIP_DEFLATED):
data = self._decompressor.decompress(
self._unconsumed,
max(n - len_readbuffer, self.MIN_READ_SIZE)
)
self._unconsumed = self._decompressor.unconsumed_tail
eof = len(self._unconsumed) == 0 and self._compress_left == 0
if eof:
data += self._decompressor.flush()
self._update_crc(data, eof=eof)
self._readbuffer = self._readbuffer[self._offset:] + data
self._offset = 0
# Read from buffer.
data = self._readbuffer[self._offset: self._offset + n]
self._offset += len(data)
return data
class ZipFile:
""" Class with methods to open, read, write, close, list zip files.
z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False)
file: Either the path to the file, or a file-like object.
If it is a path, the file will be opened and closed by ZipFile.
mode: The mode can be either read "r", write "w" or append "a".
compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib).
allowZip64: if True ZipFile will create files with ZIP64 extensions when
needed, otherwise it will raise an exception when this would
be necessary.
"""
fp = None # Set here since __del__ checks it
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False):
"""Open the ZIP file with mode read "r", write "w" or append "a"."""
if mode not in ("r", "w", "a"):
raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')
if compression == ZIP_STORED:
pass
elif compression == ZIP_DEFLATED:
if not zlib:
raise RuntimeError,\
"Compression requires the (missing) zlib module"
else:
raise RuntimeError, "That compression method is not supported"
self._allowZip64 = allowZip64
self._didModify = False
self.debug = 0 # Level of printing: 0 through 3
self.NameToInfo = {} # Find file info given name
self.filelist = [] # List of ZipInfo instances for archive
self.compression = compression # Method of compression
self.mode = key = mode.replace('b', '')[0]
self.pwd = None
self.comment = ''
# Check if we were passed a file-like object
if isinstance(file, basestring):
self._filePassed = 0
self.filename = file
modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'}
try:
self.fp = open(file, modeDict[mode])
except IOError:
if mode == 'a':
mode = key = 'w'
self.fp = open(file, modeDict[mode])
else:
raise
else:
self._filePassed = 1
self.fp = file
self.filename = getattr(file, 'name', None)
if key == 'r':
self._GetContents()
elif key == 'w':
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
elif key == 'a':
try:
# See if file is a zip file
self._RealGetContents()
# seek to start of directory and overwrite
self.fp.seek(self.start_dir, 0)
except BadZipfile:
# file is not a zip file, just append
self.fp.seek(0, 2)
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
else:
if not self._filePassed:
self.fp.close()
self.fp = None
raise RuntimeError, 'Mode must be "r", "w" or "a"'
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def _GetContents(self):
"""Read the directory, making sure we close the file if the format
is bad."""
try:
self._RealGetContents()
except BadZipfile:
if not self._filePassed:
self.fp.close()
self.fp = None
raise
def _RealGetContents(self):
"""Read in the table of contents for the ZIP file."""
fp = self.fp
try:
endrec = _EndRecData(fp)
except IOError:
raise BadZipfile("File is not a zip file")
if not endrec:
raise BadZipfile, "File is not a zip file"
if self.debug > 1:
print endrec
size_cd = endrec[_ECD_SIZE] # bytes in central directory
offset_cd = endrec[_ECD_OFFSET] # offset of central directory
self.comment = endrec[_ECD_COMMENT] # archive comment
# "concat" is zero, unless zip was concatenated to another file
concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
if endrec[_ECD_SIGNATURE] == stringEndArchive64:
# If Zip64 extension structures are present, account for them
concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
if self.debug > 2:
inferred = concat + offset_cd
print "given, inferred, offset", offset_cd, inferred, concat
# self.start_dir: Position of start of central directory
self.start_dir = offset_cd + concat
fp.seek(self.start_dir, 0)
data = fp.read(size_cd)
fp = cStringIO.StringIO(data)
total = 0
while total < size_cd:
centdir = fp.read(sizeCentralDir)
if centdir[0:4] != stringCentralDir:
raise BadZipfile, "Bad magic number for central directory"
centdir = struct.unpack(structCentralDir, centdir)
if self.debug > 2:
print centdir
filename = fp.read(centdir[_CD_FILENAME_LENGTH])
# Create ZipInfo instance to store file information
x = ZipInfo(filename)
x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
(x.create_version, x.create_system, x.extract_version, x.reserved,
x.flag_bits, x.compress_type, t, d,
x.CRC, x.compress_size, x.file_size) = centdir[1:12]
x.volume, x.internal_attr, x.external_attr = centdir[15:18]
# Convert date/time code to (year, month, day, hour, min, sec)
x._raw_time = t
x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
x._decodeExtra()
x.header_offset = x.header_offset + concat
x.filename = x._decodeFilename()
self.filelist.append(x)
self.NameToInfo[x.filename] = x
# update total bytes read from central directory
total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
+ centdir[_CD_EXTRA_FIELD_LENGTH]
+ centdir[_CD_COMMENT_LENGTH])
if self.debug > 2:
print "total", total
def namelist(self):
"""Return a list of file names in the archive."""
l = []
for data in self.filelist:
l.append(data.filename)
return l
def infolist(self):
"""Return a list of class ZipInfo instances for files in the
archive."""
return self.filelist
def printdir(self):
"""Print a table of contents for the zip file."""
print "%-46s %19s %12s" % ("File Name", "Modified ", "Size")
for zinfo in self.filelist:
date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)
def testzip(self):
"""Read all the files and check the CRC."""
chunk_size = 2 ** 20
for zinfo in self.filelist:
try:
# Read by chunks, to avoid an OverflowError or a
# MemoryError with very large embedded files.
f = self.open(zinfo.filename, "r")
while f.read(chunk_size): # Check CRC-32
pass
except BadZipfile:
return zinfo.filename
def getinfo(self, name):
"""Return the instance of ZipInfo given 'name'."""
info = self.NameToInfo.get(name)
if info is None:
raise KeyError(
'There is no item named %r in the archive' % name)
return info
def setpassword(self, pwd):
"""Set default password for encrypted files."""
self.pwd = pwd
def read(self, name, pwd=None):
"""Return file bytes (as a string) for name."""
return self.open(name, "r", pwd).read()
def open(self, name, mode="r", pwd=None):
"""Return file-like object for 'name'."""
if mode not in ("r", "U", "rU"):
raise RuntimeError, 'open() requires mode "r", "U", or "rU"'
if not self.fp:
raise RuntimeError, \
"Attempt to read ZIP archive that was already closed"
# Only open a new file for instances where we were not
# given a file object in the constructor
if self._filePassed:
zef_file = self.fp
else:
zef_file = open(self.filename, 'rb')
# Make sure we have an info object
if isinstance(name, ZipInfo):
# 'name' is already an info object
zinfo = name
else:
# Get info object for name
zinfo = self.getinfo(name)
zef_file.seek(zinfo.header_offset, 0)
# Skip the file header:
fheader = zef_file.read(sizeFileHeader)
if fheader[0:4] != stringFileHeader:
raise BadZipfile, "Bad magic number for file header"
fheader = struct.unpack(structFileHeader, fheader)
fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
if fheader[_FH_EXTRA_FIELD_LENGTH]:
zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
if fname != zinfo.orig_filename:
raise BadZipfile, \
'File name in directory "%s" and header "%s" differ.' % (
zinfo.orig_filename, fname)
# check for encrypted flag & handle password
is_encrypted = zinfo.flag_bits & 0x1
zd = None
if is_encrypted:
if not pwd:
pwd = self.pwd
if not pwd:
raise RuntimeError, "File %s is encrypted, " \
"password required for extraction" % name
zd = _ZipDecrypter(pwd)
# The first 12 bytes in the cypher stream is an encryption header
# used to strengthen the algorithm. The first 11 bytes are
# completely random, while the 12th contains the MSB of the CRC,
# or the MSB of the file time depending on the header type
# and is used to check the correctness of the password.
bytes = zef_file.read(12)
h = map(zd, bytes[0:12])
if zinfo.flag_bits & 0x8:
# compare against the file type from extended local headers
check_byte = (zinfo._raw_time >> 8) & 0xff
else:
# compare against the CRC otherwise
check_byte = (zinfo.CRC >> 24) & 0xff
if ord(h[11]) != check_byte:
raise RuntimeError("Bad password for file", name)
return ZipExtFile(zef_file, mode, zinfo, zd)
def extract(self, member, path=None, pwd=None):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a ZipInfo object. You can
specify a different directory using `path'.
"""
if not isinstance(member, ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
return self._extract_member(member, path, pwd)
def extractall(self, path=None, members=None, pwd=None):
"""Extract all members from the archive to the current working
directory. `path' specifies a different directory to extract to.
`members' is optional and must be a subset of the list returned
by namelist().
"""
if members is None:
members = self.namelist()
for zipinfo in members:
self.extract(zipinfo, path, pwd)
def _extract_member(self, member, targetpath, pwd):
"""Extract the ZipInfo object 'member' to a physical
file on the path targetpath.
"""
# build the destination pathname, replacing
# forward slashes to platform specific separators.
# Strip trailing path separator, unless it represents the root.
if (targetpath[-1:] in (os.path.sep, os.path.altsep)
and len(os.path.splitdrive(targetpath)[1]) > 1):
targetpath = targetpath[:-1]
# don't include leading "/" from file name if present
if member.filename[0] == '/':
targetpath = os.path.join(targetpath, member.filename[1:])
else:
targetpath = os.path.join(targetpath, member.filename)
targetpath = os.path.normpath(targetpath)
# Create all upper directories if necessary.
upperdirs = os.path.dirname(targetpath)
if upperdirs and not os.path.exists(upperdirs):
os.makedirs(upperdirs)
if member.filename[-1] == '/':
if not os.path.isdir(targetpath):
os.mkdir(targetpath)
return targetpath
source = self.open(member, pwd=pwd)
target = file(targetpath, "wb")
shutil.copyfileobj(source, target)
source.close()
target.close()
return targetpath
def _writecheck(self, zinfo):
"""Check for errors before writing a file to the archive."""
if zinfo.filename in self.NameToInfo:
if self.debug: # Warning for duplicate names
print "Duplicate name:", zinfo.filename
if self.mode not in ("w", "a"):
raise RuntimeError, 'write() requires mode "w" or "a"'
if not self.fp:
raise RuntimeError, \
"Attempt to write ZIP archive that was already closed"
if zinfo.compress_type == ZIP_DEFLATED and not zlib:
raise RuntimeError, \
"Compression requires the (missing) zlib module"
if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED):
raise RuntimeError, \
"That compression method is not supported"
if zinfo.file_size > ZIP64_LIMIT:
if not self._allowZip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
if zinfo.header_offset > ZIP64_LIMIT:
if not self._allowZip64:
raise LargeZipFile("Zipfile size would require ZIP64 extensions")
def write(self, filename, arcname=None, compress_type=None):
"""Put the bytes from filename into the archive under the name
arcname."""
if not self.fp:
raise RuntimeError(
"Attempt to write to ZIP archive that was already closed")
st = os.stat(filename)
isdir = stat.S_ISDIR(st.st_mode)
mtime = time.localtime(st.st_mtime)
date_time = mtime[0:6]
# Create ZipInfo instance to store file information
if arcname is None:
arcname = filename
arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
while arcname[0] in (os.sep, os.altsep):
arcname = arcname[1:]
if isdir:
arcname += '/'
zinfo = ZipInfo(arcname, date_time)
zinfo.external_attr = (st[0] & 0xFFFF) << 16L # Unix attributes
if compress_type is None:
zinfo.compress_type = self.compression
else:
zinfo.compress_type = compress_type
zinfo.file_size = st.st_size
zinfo.flag_bits = 0x00
zinfo.header_offset = self.fp.tell() # Start of header bytes
self._writecheck(zinfo)
self._didModify = True
if isdir:
zinfo.file_size = 0
zinfo.compress_size = 0
zinfo.CRC = 0
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
self.fp.write(zinfo.FileHeader())
return
with open(filename, "rb") as fp:
# Must overwrite CRC and sizes with correct data later
zinfo.CRC = CRC = 0
zinfo.compress_size = compress_size = 0
zinfo.file_size = file_size = 0
self.fp.write(zinfo.FileHeader())
if zinfo.compress_type == ZIP_DEFLATED:
cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -15)
else:
cmpr = None
while 1:
buf = fp.read(1024 * 8)
if not buf:
break
file_size = file_size + len(buf)
CRC = crc32(buf, CRC) & 0xffffffff
if cmpr:
buf = cmpr.compress(buf)
compress_size = compress_size + len(buf)
self.fp.write(buf)
if cmpr:
buf = cmpr.flush()
compress_size = compress_size + len(buf)
self.fp.write(buf)
zinfo.compress_size = compress_size
else:
zinfo.compress_size = file_size
zinfo.CRC = CRC
zinfo.file_size = file_size
# Seek backwards and write CRC and file sizes
position = self.fp.tell() # Preserve current position in file
self.fp.seek(zinfo.header_offset + 14, 0)
self.fp.write(struct.pack("<LLL", zinfo.CRC, zinfo.compress_size,
zinfo.file_size))
self.fp.seek(position, 0)
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
"""Write a file into the archive. The contents is the string
'bytes'. 'zinfo_or_arcname' is either a ZipInfo instance or
the name of the file in the archive."""
if not isinstance(zinfo_or_arcname, ZipInfo):
zinfo = ZipInfo(filename=zinfo_or_arcname,
date_time=time.localtime(time.time())[:6])
zinfo.compress_type = self.compression
zinfo.external_attr = 0600 << 16
else:
zinfo = zinfo_or_arcname
if not self.fp:
raise RuntimeError(
"Attempt to write to ZIP archive that was already closed")
if compress_type is not None:
zinfo.compress_type = compress_type
zinfo.file_size = len(bytes) # Uncompressed size
zinfo.header_offset = self.fp.tell() # Start of header bytes
self._writecheck(zinfo)
self._didModify = True
zinfo.CRC = crc32(bytes) & 0xffffffff # CRC-32 checksum
if zinfo.compress_type == ZIP_DEFLATED:
co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -15)
bytes = co.compress(bytes) + co.flush()
zinfo.compress_size = len(bytes) # Compressed size
else:
zinfo.compress_size = zinfo.file_size
zinfo.header_offset = self.fp.tell() # Start of header bytes
self.fp.write(zinfo.FileHeader())
self.fp.write(bytes)
self.fp.flush()
if zinfo.flag_bits & 0x08:
# Write CRC and file sizes after the file data
self.fp.write(struct.pack("<LLL", zinfo.CRC, zinfo.compress_size,
zinfo.file_size))
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def __del__(self):
"""Call the "close()" method in case the user forgot."""
self.close()
def close(self):
"""Close the file, and for mode "w" and "a" write the ending
records."""
if self.fp is None:
return
if self.mode in ("w", "a") and self._didModify: # write ending records
count = 0
pos1 = self.fp.tell()
for zinfo in self.filelist: # write central directory
count = count + 1
dt = zinfo.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
extra = []
if zinfo.file_size > ZIP64_LIMIT \
or zinfo.compress_size > ZIP64_LIMIT:
extra.append(zinfo.file_size)
extra.append(zinfo.compress_size)
file_size = 0xffffffff
compress_size = 0xffffffff
else:
file_size = zinfo.file_size
compress_size = zinfo.compress_size
if zinfo.header_offset > ZIP64_LIMIT:
extra.append(zinfo.header_offset)
header_offset = 0xffffffffL
else:
header_offset = zinfo.header_offset
extra_data = zinfo.extra
if extra:
# Append a ZIP64 field to the extra's
extra_data = struct.pack(
'<HH' + 'Q'*len(extra),
1, 8*len(extra), *extra) + extra_data
extract_version = max(45, zinfo.extract_version)
create_version = max(45, zinfo.create_version)
else:
extract_version = zinfo.extract_version
create_version = zinfo.create_version
try:
filename, flag_bits = zinfo._encodeFilenameFlags()
centdir = struct.pack(structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
except DeprecationWarning:
print >>sys.stderr, (structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(zinfo.filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
raise
self.fp.write(centdir)
self.fp.write(filename)
self.fp.write(extra_data)
self.fp.write(zinfo.comment)
pos2 = self.fp.tell()
# Write end-of-zip-archive record
centDirCount = count
centDirSize = pos2 - pos1
centDirOffset = pos1
if (centDirCount >= ZIP_FILECOUNT_LIMIT or
centDirOffset > ZIP64_LIMIT or
centDirSize > ZIP64_LIMIT):
# Need to write the ZIP64 end-of-archive records
zip64endrec = struct.pack(
structEndArchive64, stringEndArchive64,
44, 45, 45, 0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset)
self.fp.write(zip64endrec)
zip64locrec = struct.pack(
structEndArchive64Locator,
stringEndArchive64Locator, 0, pos2, 1)
self.fp.write(zip64locrec)
centDirCount = min(centDirCount, 0xFFFF)
centDirSize = min(centDirSize, 0xFFFFFFFF)
centDirOffset = min(centDirOffset, 0xFFFFFFFF)
# check for valid comment length
if len(self.comment) >= ZIP_MAX_COMMENT:
if self.debug > 0:
msg = 'Archive comment is too long; truncating to %d bytes' \
% ZIP_MAX_COMMENT
self.comment = self.comment[:ZIP_MAX_COMMENT]
endrec = struct.pack(structEndArchive, stringEndArchive,
0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset, len(self.comment))
self.fp.write(endrec)
self.fp.write(self.comment)
self.fp.flush()
if not self._filePassed:
self.fp.close()
self.fp = None
class PyZipFile(ZipFile):
"""Class to create ZIP archives with Python library files and packages."""
def writepy(self, pathname, basename = ""):
"""Add all files from "pathname" to the ZIP archive.
If pathname is a package directory, search the directory and
all package subdirectories recursively for all *.py and enter
the modules into the archive. If pathname is a plain
directory, listdir *.py and enter all modules. Else, pathname
must be a Python *.py file and the module will be put into the
archive. Added modules are always module.pyo or module.pyc.
This method will compile the module.py into module.pyc if
necessary.
"""
dir, name = os.path.split(pathname)
if os.path.isdir(pathname):
initname = os.path.join(pathname, "__init__.py")
if os.path.isfile(initname):
# This is a package directory, add it
if basename:
basename = "%s/%s" % (basename, name)
else:
basename = name
if self.debug:
print "Adding package in", pathname, "as", basename
fname, arcname = self._get_codename(initname[0:-3], basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
dirlist = os.listdir(pathname)
dirlist.remove("__init__.py")
# Add all *.py files and package subdirectories
for filename in dirlist:
path = os.path.join(pathname, filename)
root, ext = os.path.splitext(filename)
if os.path.isdir(path):
if os.path.isfile(os.path.join(path, "__init__.py")):
# This is a package directory, add it
self.writepy(path, basename) # Recursive call
elif ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
else:
# This is NOT a package directory, add its files at top level
if self.debug:
print "Adding files from directory", pathname
for filename in os.listdir(pathname):
path = os.path.join(pathname, filename)
root, ext = os.path.splitext(filename)
if ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
else:
if pathname[-3:] != ".py":
raise RuntimeError, \
'Files added with writepy() must end with ".py"'
fname, arcname = self._get_codename(pathname[0:-3], basename)
if self.debug:
print "Adding file", arcname
self.write(fname, arcname)
def _get_codename(self, pathname, basename):
"""Return (filename, archivename) for the path.
Given a module name path, return the correct file path and
archive name, compiling if necessary. For example, given
/python/lib/string, return (/python/lib/string.pyc, string).
"""
file_py = pathname + ".py"
file_pyc = pathname + ".pyc"
file_pyo = pathname + ".pyo"
if os.path.isfile(file_pyo) and \
os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
fname = file_pyo # Use .pyo file
elif not os.path.isfile(file_pyc) or \
os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
import py_compile
if self.debug:
print "Compiling", file_py
try:
py_compile.compile(file_py, file_pyc, None, True)
except py_compile.PyCompileError,err:
print err.msg
fname = file_pyc
else:
fname = file_pyc
archivename = os.path.split(fname)[1]
if basename:
archivename = "%s/%s" % (basename, archivename)
return (fname, archivename)
def main(args = None):
import textwrap
USAGE=textwrap.dedent("""\
Usage:
zipfile.py -l zipfile.zip # Show listing of a zipfile
zipfile.py -t zipfile.zip # Test if a zipfile is valid
zipfile.py -e zipfile.zip target # Extract zipfile into target dir
zipfile.py -c zipfile.zip src ... # Create zipfile from sources
""")
if args is None:
args = sys.argv[1:]
if not args or args[0] not in ('-l', '-c', '-e', '-t'):
print USAGE
sys.exit(1)
if args[0] == '-l':
if len(args) != 2:
print USAGE
sys.exit(1)
zf = ZipFile(args[1], 'r')
zf.printdir()
zf.close()
elif args[0] == '-t':
if len(args) != 2:
print USAGE
sys.exit(1)
zf = ZipFile(args[1], 'r')
badfile = zf.testzip()
if badfile:
print("The following enclosed file is corrupted: {!r}".format(badfile))
print "Done testing"
elif args[0] == '-e':
if len(args) != 3:
print USAGE
sys.exit(1)
zf = ZipFile(args[1], 'r')
out = args[2]
for path in zf.namelist():
if path.startswith('./'):
tgt = os.path.join(out, path[2:])
else:
tgt = os.path.join(out, path)
tgtdir = os.path.dirname(tgt)
if not os.path.exists(tgtdir):
os.makedirs(tgtdir)
with open(tgt, 'wb') as fp:
fp.write(zf.read(path))
zf.close()
elif args[0] == '-c':
if len(args) < 3:
print USAGE
sys.exit(1)
def addToZip(zf, path, zippath):
if os.path.isfile(path):
zf.write(path, zippath, ZIP_DEFLATED)
elif os.path.isdir(path):
for nm in os.listdir(path):
addToZip(zf,
os.path.join(path, nm), os.path.join(zippath, nm))
# else: ignore
zf = ZipFile(args[1], 'w', allowZip64=True)
for src in args[2:]:
addToZip(zf, src, os.path.basename(src))
zf.close()
if __name__ == "__main__":
main()
| mit | -3,206,429,268,566,563,000 | 36.599165 | 103 | 0.555062 | false |
nazo/ansible | lib/ansible/modules/windows/win_robocopy.py | 72 | 4833 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Corwin Brown <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_robocopy
version_added: "2.2"
short_description: Synchronizes the contents of two directories using Robocopy.
description:
- Synchronizes the contents of two directories on the remote machine. Under the hood this just calls out to RoboCopy, since that should be available
on most modern Windows Systems.
options:
src:
description:
- Source file/directory to sync.
required: true
dest:
description:
- Destination file/directory to sync (Will receive contents of src).
required: true
recurse:
description:
- Includes all subdirectories (Toggles the `/e` flag to RoboCopy). If "flags" is set, this will be ignored.
choices:
- true
- false
default: false
required: false
purge:
description:
- Deletes any files/directories found in the destination that do not exist in the source (Toggles the `/purge` flag to RoboCopy). If "flags" is
set, this will be ignored.
choices:
- true
- false
default: false
required: false
flags:
description:
- Directly supply Robocopy flags. If set, purge and recurse will be ignored.
default: None
required: false
author: Corwin Brown (@blakfeld)
notes:
- This is not a complete port of the "synchronize" module. Unlike the "synchronize" module this only performs the sync/copy on the remote machine,
not from the master to the remote machine.
- This module does not currently support all Robocopy flags.
- Works on Windows 7, Windows 8, Windows Server 2k8, and Windows Server 2k12
'''
EXAMPLES = r'''
- name: Sync the contents of one directory to another
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
- name: Sync the contents of one directory to another, including subdirectories
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
recurse: True
- name: Sync the contents of one directory to another, and remove any files/directories found in destination that do not exist in the source
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
purge: True
- name: Sync content in recursive mode, removing any files/directories found in destination that do not exist in the source
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
recurse: True
purge: True
- name: Sync Two Directories in recursive and purging mode, specifying additional special flags
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
flags: /E /PURGE /XD SOME_DIR /XF SOME_FILE /MT:32
'''
RETURN = r'''
src:
description: The Source file/directory of the sync.
returned: always
type: string
sample: c:\Some\Path
dest:
description: The Destination file/directory of the sync.
returned: always
type: string
sample: c:\Some\Path
recurse:
description: Whether or not the recurse flag was toggled.
returned: always
type: bool
sample: False
purge:
description: Whether or not the purge flag was toggled.
returned: always
type: bool
sample: False
flags:
description: Any flags passed in by the user.
returned: always
type: string
sample: "/e /purge"
rc:
description: The return code returned by robocopy.
returned: success
type: int
sample: 1
output:
description: The output of running the robocopy command.
returned: success
type: string
sample: "----------------------------------------\n ROBOCOPY :: Robust File Copy for Windows \n----------------------------------------\n"
msg:
description: Output interpreted into a concise message.
returned: always
type: string
sample: No files copied!
changed:
description: Whether or not any changes were made.
returned: always
type: bool
sample: False
'''
| gpl-3.0 | 8,080,699,289,279,458,000 | 30.383117 | 160 | 0.683633 | false |
bis12/pushmanager | tests/test_template_push.py | 3 | 7070 | import time
import testing as T
class PushTemplateTest(T.TemplateTestCase):
authenticated = True
push_page = 'push.html'
push_status_page = 'push-status.html'
accepting_push_sections = ['blessed', 'verified', 'staged', 'added', 'pickme', 'requested']
now = time.time()
basic_push = {
'id': 0,
'user': 'pushmaster',
'title': 'fake_push',
'branch': 'deploy-fake-branch',
'state': 'accepting',
'pushtype': 'Regular',
'created': now,
'modified': now,
'extra_pings': None,
}
basic_kwargs = {
'page_title': 'fake_push_title',
'push_contents': {},
'available_requests': [],
'fullrepo': 'not/a/repo',
'override': False,
'push_survey_url': None
}
basic_request = {
'id': 0,
'repo': 'non-existent',
'branch': 'non-existent',
'user': 'testuser',
'reviewid': 0,
'title': 'some title',
'tags': None,
'revision': '0' * 40,
'state': 'requested',
'created': now,
'modified': now,
'description': 'nondescript',
'comments': 'nocomment',
'watchers': None,
}
def test_include_push_status_when_accepting(self):
tree = self.render_etree(
self.push_page,
push_info=self.basic_push,
**self.basic_kwargs)
found_h3 = []
for h3 in tree.iter('h3'):
T.assert_equal('status-header', h3.attrib['class'])
T.assert_in(h3.attrib['section'], self.accepting_push_sections)
found_h3.append(h3)
T.assert_equal(len(self.accepting_push_sections), len(found_h3))
def test_include_push_status_when_done(self):
push = dict(self.basic_push)
push['state'] = 'live'
tree = self.render_etree(
self.push_page,
push_info=push,
**self.basic_kwargs)
found_h3 = []
for h3 in tree.iter('h3'):
T.assert_equal('status-header', h3.attrib['class'])
found_h3.append(h3)
T.assert_equal(1, len(found_h3))
def generate_push_contents(self, requests):
push_contents = dict.fromkeys(self.accepting_push_sections, [])
for section in self.accepting_push_sections:
push_contents[section] = requests
return push_contents
def test_no_mine_on_requests_as_random_user(self):
kwargs = dict(self.basic_kwargs)
kwargs['push_contents'] = self.generate_push_contents([self.basic_request])
kwargs['current_user'] = 'random_user'
with self.no_ui_modules():
tree = self.render_etree(
self.push_status_page,
push_info=self.basic_push,
**kwargs)
found_mockreq = []
for mockreq in tree.iter('mock'):
T.assert_not_in('class', mockreq.getparent().attrib.keys())
found_mockreq.append(mockreq)
T.assert_equal(5, len(found_mockreq))
def test_mine_on_requests_as_request_user(self):
request = dict(self.basic_request)
request['user'] = 'notme'
push_contents = {}
section_id = []
for section in self.accepting_push_sections:
push_contents[section] = [self.basic_request, request]
section_id.append('%s-items' % section)
kwargs = dict(self.basic_kwargs)
kwargs['push_contents'] = push_contents
kwargs['current_user'] = 'testuser'
with self.no_ui_modules():
tree = self.render_etree(
self.push_status_page,
push_info=self.basic_push,
**kwargs)
found_li = []
found_mockreq = []
for mockreq in tree.iter('mock'):
if 'class' in mockreq.getparent().attrib:
T.assert_equal('mine', mockreq.getparent().attrib['class'])
found_li.append(mockreq)
found_mockreq.append(mockreq)
T.assert_equal(5, len(found_li))
T.assert_equal(10, len(found_mockreq))
def test_mine_on_requests_as_watcher(self):
request = dict(self.basic_request)
request['watchers'] = 'watcher1'
push_contents = {}
section_id = []
for section in self.accepting_push_sections:
push_contents[section] = [request, self.basic_request]
section_id.append('%s-items' % section)
kwargs = dict(self.basic_kwargs)
kwargs['push_contents'] = push_contents
kwargs['current_user'] = 'watcher1'
with self.no_ui_modules():
tree = self.render_etree(
self.push_status_page,
push_info=self.basic_push,
**kwargs)
found_li = []
found_mockreq = []
for mockreq in tree.iter('mock'):
if 'class' in mockreq.getparent().attrib:
T.assert_equal('mine', mockreq.getparent().attrib['class'])
found_li.append(mockreq)
found_mockreq.append(mockreq)
T.assert_equal(5, len(found_li))
T.assert_equal(10, len(found_mockreq))
def test_mine_on_requests_as_pushmaster(self):
push_contents = {}
section_id = []
for section in self.accepting_push_sections:
push_contents[section] = [self.basic_request]
section_id.append('%s-items' % section)
kwargs = dict(self.basic_kwargs)
kwargs['push_contents'] = push_contents
with self.no_ui_modules():
tree = self.render_etree(
self.push_status_page,
push_info=self.basic_push,
**kwargs)
found_mockreq = []
for mockreq in tree.iter('mock'):
T.assert_not_in('class', mockreq.getparent().attrib.keys())
found_mockreq.append(mockreq)
T.assert_equal(5, len(found_mockreq))
def test_include_push_survey_exists(self):
push = dict(self.basic_push)
push['state'] = 'live'
kwargs = dict(**self.basic_kwargs)
kwargs['push_survey_url'] = 'http://sometestsurvey'
tree = self.render_etree(
self.push_page,
push_info=push,
**kwargs)
for script in tree.iter('script'):
if script.text and kwargs['push_survey_url'] in script.text:
break
else:
assert False, 'push_survey_url not found'
def test_include_new_request_form(self):
with self.no_ui_modules():
tree = self.render_etree(
self.push_page,
push_info=self.basic_push,
**self.basic_kwargs)
T.assert_exactly_one(
*[mock.attrib['name'] for mock in tree.iter('mock')],
truthy_fxn=lambda name: name == 'mock.NewRequestDialog()')
if __name__ == '__main__':
T.run()
| apache-2.0 | 3,026,572,554,180,221,400 | 30.846847 | 95 | 0.534512 | false |
dischinator/pyload | module/plugins/hoster/PornhubCom.py | 5 | 2547 | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.Hoster import Hoster
class PornhubCom(Hoster):
__name__ = "PornhubCom"
__type__ = "hoster"
__version__ = "0.55"
__status__ = "testing"
__pattern__ = r'http://(?:www\.)?pornhub\.com/view_video\.php\?viewkey=\w+'
__config__ = [("activated", "bool", "Activated", True)]
__description__ = """Pornhub.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("jeix", "[email protected]")]
def process(self, pyfile):
self.download_html()
if not self.file_exists():
self.offline()
pyfile.name = self.get_file_name()
self.download(self.get_file_url())
def download_html(self):
url = self.pyfile.url
self.data = self.load(url)
def get_file_url(self):
"""
Returns the absolute downloadable filepath
"""
if not self.data:
self.download_html()
url = "http://www.pornhub.com//gateway.php"
video_id = self.pyfile.url.split('=')[-1]
#: Thanks to jD team for this one v
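        # Note: the byte string below appears to be a pre-serialized binary
        # RPC payload for the Flash gateway (an assumption; only the video id
        # length and value are spliced into it).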
post_data = "\x00\x03\x00\x00\x00\x01\x00\x0c\x70\x6c\x61\x79\x65\x72\x43\x6f\x6e\x66\x69\x67\x00\x02\x2f\x31\x00\x00\x00\x44\x0a\x00\x00\x00\x03\x02\x00"
post_data += chr(len(video_id))
post_data += video_id
post_data += "\x02\x00\x02\x2d\x31\x02\x00\x20"
post_data += "add299463d4410c6d1b1c418868225f7"
content = self.load(url, post=str(post_data))
new_content = ""
for x in content:
if ord(x) < 32 or ord(x) > 176:
new_content += '#'
else:
new_content += x
content = new_content
return re.search(r'flv_url.*(http.*?)##post_roll', content).group(1)
def get_file_name(self):
if not self.data:
self.download_html()
m = re.search(r'<title.+?>(.+?) - ', self.data)
if m is not None:
name = m.group(1)
else:
matches = re.findall('<h1>(.*?)</h1>', self.data)
if len(matches) > 1:
name = matches[1]
else:
name = matches[0]
return name + '.flv'
def file_exists(self):
"""
Returns True or False
"""
if not self.data:
self.download_html()
if re.search(r'This video is no longer in our database or is in conversion', self.data):
return False
else:
return True
| gpl-3.0 | 1,785,887,796,485,136,000 | 26.387097 | 162 | 0.522183 | false |
happyleavesaoc/home-assistant | homeassistant/helpers/event.py | 4 | 11145 | """Helpers for listening to events."""
import functools as ft
from homeassistant.helpers.sun import get_astral_event_next
from ..core import HomeAssistant, callback
from ..const import (
ATTR_NOW, EVENT_STATE_CHANGED, EVENT_TIME_CHANGED, MATCH_ALL)
from ..util import dt as dt_util
from ..util.async import run_callback_threadsafe
# PyLint does not like the use of threaded_listener_factory
# pylint: disable=invalid-name
def threaded_listener_factory(async_factory):
"""Convert an async event helper to a threaded one."""
@ft.wraps(async_factory)
def factory(*args, **kwargs):
"""Call async event helper safely."""
hass = args[0]
if not isinstance(hass, HomeAssistant):
raise TypeError('First parameter needs to be a hass instance')
async_remove = run_callback_threadsafe(
hass.loop, ft.partial(async_factory, *args, **kwargs)).result()
def remove():
"""Threadsafe removal."""
run_callback_threadsafe(hass.loop, async_remove).result()
return remove
return factory
@callback
def async_track_state_change(hass, entity_ids, action, from_state=None,
to_state=None):
"""Track specific state changes.
entity_ids, from_state and to_state can be string or list.
Use list to match multiple.
Returns a function that can be called to remove the listener.
Must be run within the event loop.
"""
from_state = _process_state_match(from_state)
to_state = _process_state_match(to_state)
# Ensure it is a lowercase list with entity ids we want to match on
if entity_ids == MATCH_ALL:
pass
elif isinstance(entity_ids, str):
entity_ids = (entity_ids.lower(),)
else:
entity_ids = tuple(entity_id.lower() for entity_id in entity_ids)
@callback
def state_change_listener(event):
"""Handle specific state changes."""
if entity_ids != MATCH_ALL and \
event.data.get('entity_id') not in entity_ids:
return
if event.data.get('old_state') is not None:
old_state = event.data['old_state'].state
else:
old_state = None
if event.data.get('new_state') is not None:
new_state = event.data['new_state'].state
else:
new_state = None
if _matcher(old_state, from_state) and _matcher(new_state, to_state):
hass.async_run_job(action, event.data.get('entity_id'),
event.data.get('old_state'),
event.data.get('new_state'))
return hass.bus.async_listen(EVENT_STATE_CHANGED, state_change_listener)
track_state_change = threaded_listener_factory(async_track_state_change)
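# A minimal usage sketch for async_track_state_change; 'light.kitchen' and
# the 'off'/'on' states are assumed example values, not upstream code.
def _example_track_state_change(hass):
    """Watch one entity and return the detach function (illustrative only)."""
    @callback
    def light_changed(entity_id, old_state, new_state):
        """Handle the tracked entity switching from 'off' to 'on'."""
        # Runs inside the event loop each time the matching change fires.
        pass
    # async_track_state_change returns a function that removes the listener.
    return async_track_state_change(
        hass, 'light.kitchen', light_changed,
        from_state='off', to_state='on')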
@callback
def async_track_template(hass, template, action, variables=None):
"""Add a listener that track state changes with template condition."""
from . import condition
# Local variable to keep track of if the action has already been triggered
already_triggered = False
@callback
def template_condition_listener(entity_id, from_s, to_s):
"""Check if condition is correct and run action."""
nonlocal already_triggered
template_result = condition.async_template(hass, template, variables)
# Check to see if template returns true
if template_result and not already_triggered:
already_triggered = True
hass.async_run_job(action, entity_id, from_s, to_s)
elif not template_result:
already_triggered = False
return async_track_state_change(
hass, template.extract_entities(), template_condition_listener)
track_template = threaded_listener_factory(async_track_template)
@callback
def async_track_point_in_time(hass, action, point_in_time):
"""Add a listener that fires once after a specific point in time."""
utc_point_in_time = dt_util.as_utc(point_in_time)
@callback
def utc_converter(utc_now):
"""Convert passed in UTC now to local now."""
hass.async_run_job(action, dt_util.as_local(utc_now))
return async_track_point_in_utc_time(hass, utc_converter,
utc_point_in_time)
track_point_in_time = threaded_listener_factory(async_track_point_in_time)
@callback
def async_track_point_in_utc_time(hass, action, point_in_time):
"""Add a listener that fires once after a specific point in UTC time."""
# Ensure point_in_time is UTC
point_in_time = dt_util.as_utc(point_in_time)
@callback
def point_in_time_listener(event):
"""Listen for matching time_changed events."""
now = event.data[ATTR_NOW]
if now < point_in_time or hasattr(point_in_time_listener, 'run'):
return
# Set variable so that we will never run twice.
# Because the event bus might have to wait till a thread comes
# available to execute this listener it might occur that the
# listener gets lined up twice to be executed. This will make
# sure the second time it does nothing.
point_in_time_listener.run = True
async_unsub()
hass.async_run_job(action, now)
async_unsub = hass.bus.async_listen(EVENT_TIME_CHANGED,
point_in_time_listener)
return async_unsub
track_point_in_utc_time = threaded_listener_factory(
async_track_point_in_utc_time)
@callback
def async_track_time_interval(hass, action, interval):
"""Add a listener that fires repetitively at every timedelta interval."""
remove = None
def next_interval():
"""Return the next interval."""
return dt_util.utcnow() + interval
@callback
def interval_listener(now):
"""Handle elaspsed intervals."""
nonlocal remove
remove = async_track_point_in_utc_time(
hass, interval_listener, next_interval())
hass.async_run_job(action, now)
remove = async_track_point_in_utc_time(
hass, interval_listener, next_interval())
def remove_listener():
"""Remove interval listener."""
remove()
return remove_listener
track_time_interval = threaded_listener_factory(async_track_time_interval)
@callback
def async_track_sunrise(hass, action, offset=None):
"""Add a listener that will fire a specified offset from sunrise daily."""
remove = None
@callback
def sunrise_automation_listener(now):
"""Handle points in time to execute actions."""
nonlocal remove
remove = async_track_point_in_utc_time(
hass, sunrise_automation_listener, get_astral_event_next(
hass, 'sunrise', offset=offset))
hass.async_run_job(action)
remove = async_track_point_in_utc_time(
hass, sunrise_automation_listener, get_astral_event_next(
hass, 'sunrise', offset=offset))
def remove_listener():
"""Remove sunset listener."""
remove()
return remove_listener
track_sunrise = threaded_listener_factory(async_track_sunrise)
@callback
def async_track_sunset(hass, action, offset=None):
"""Add a listener that will fire a specified offset from sunset daily."""
remove = None
@callback
def sunset_automation_listener(now):
"""Handle points in time to execute actions."""
nonlocal remove
remove = async_track_point_in_utc_time(
hass, sunset_automation_listener, get_astral_event_next(
hass, 'sunset', offset=offset))
hass.async_run_job(action)
remove = async_track_point_in_utc_time(
hass, sunset_automation_listener, get_astral_event_next(
hass, 'sunset', offset=offset))
def remove_listener():
"""Remove sunset listener."""
remove()
return remove_listener
track_sunset = threaded_listener_factory(async_track_sunset)
@callback
def async_track_utc_time_change(hass, action, year=None, month=None, day=None,
hour=None, minute=None, second=None,
local=False):
"""Add a listener that will fire if time matches a pattern."""
# We do not have to wrap the function with time pattern matching logic
# if no pattern given
if all(val is None for val in (year, month, day, hour, minute, second)):
@callback
def time_change_listener(event):
"""Fire every time event that comes in."""
hass.async_run_job(action, event.data[ATTR_NOW])
return hass.bus.async_listen(EVENT_TIME_CHANGED, time_change_listener)
pmp = _process_time_match
year, month, day = pmp(year), pmp(month), pmp(day)
hour, minute, second = pmp(hour), pmp(minute), pmp(second)
@callback
def pattern_time_change_listener(event):
"""Listen for matching time_changed events."""
now = event.data[ATTR_NOW]
if local:
now = dt_util.as_local(now)
mat = _matcher
# pylint: disable=too-many-boolean-expressions
if mat(now.year, year) and \
mat(now.month, month) and \
mat(now.day, day) and \
mat(now.hour, hour) and \
mat(now.minute, minute) and \
mat(now.second, second):
hass.async_run_job(action, now)
return hass.bus.async_listen(EVENT_TIME_CHANGED,
pattern_time_change_listener)
track_utc_time_change = threaded_listener_factory(async_track_utc_time_change)
@callback
def async_track_time_change(hass, action, year=None, month=None, day=None,
hour=None, minute=None, second=None):
"""Add a listener that will fire if UTC time matches a pattern."""
return async_track_utc_time_change(hass, action, year, month, day, hour,
minute, second, local=True)
track_time_change = threaded_listener_factory(async_track_time_change)
def _process_state_match(parameter):
"""Wrap parameter in a tuple if it is not one and returns it."""
if parameter is None or parameter == MATCH_ALL:
return MATCH_ALL
elif isinstance(parameter, str) or not hasattr(parameter, '__iter__'):
return (parameter,)
else:
return tuple(parameter)
def _process_time_match(parameter):
"""Wrap parameter in a tuple if it is not one and returns it."""
if parameter is None or parameter == MATCH_ALL:
return MATCH_ALL
elif isinstance(parameter, str) and parameter.startswith('/'):
return parameter
elif isinstance(parameter, str) or not hasattr(parameter, '__iter__'):
return (parameter,)
else:
return tuple(parameter)
def _matcher(subject, pattern):
"""Return True if subject matches the pattern.
Pattern is either a tuple of allowed subjects or a `MATCH_ALL`.
"""
if isinstance(pattern, str) and pattern.startswith('/'):
try:
return subject % float(pattern.lstrip('/')) == 0
except ValueError:
return False
return MATCH_ALL == pattern or subject in pattern
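# A minimal sketch of the pattern semantics above (illustrative only): a
# string pattern starting with '/' means "every N", which is how e.g.
# minute='/5' in async_track_utc_time_change fires on five-minute boundaries.
def _example_matcher_patterns():
    """Exercise _matcher with the supported pattern forms."""
    assert _matcher(10, '/5')       # 10 % 5 == 0, so the "every 5" form matches
    assert not _matcher(12, '/5')   # 12 % 5 != 0
    assert _matcher(3, (1, 2, 3))   # plain tuple membership
    assert _matcher(7, MATCH_ALL)   # MATCH_ALL matches any subject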
| apache-2.0 | -4,232,743,945,368,165,000 | 31.587719 | 78 | 0.635083 | false |
pywikibot-catfiles/file-metadata | setupdeps.py | 2 | 16766 | # -*- coding: utf-8 -*-
"""
Various dependencies that are required for file-metadata which need some
special handling.
"""
from __future__ import (division, absolute_import, unicode_literals,
print_function)
import ctypes.util
import hashlib
import os
import subprocess
import sys
from distutils import sysconfig
from distutils.errors import DistutilsSetupError
try:
from urllib.request import urlopen
except ImportError: # Python 2
from urllib2 import urlopen
PROJECT_PATH = os.path.abspath(os.path.dirname(__file__))
def data_path():
name = os.path.join(PROJECT_PATH, 'file_metadata', 'datafiles')
if not os.path.exists(name):
os.makedirs(name)
return name
def which(cmd):
try:
from shutil import which
return which(cmd)
except ImportError: # For python 3.2 and lower
try:
output = subprocess.check_output(["which", cmd],
stderr=subprocess.STDOUT)
except (OSError, subprocess.CalledProcessError):
return None
else:
output = output.decode(sys.getfilesystemencoding())
return output.strip()
def setup_install(packages):
"""
Install packages using pip to the current folder. Useful to import
packages during setup itself.
"""
packages = list(packages)
if not packages:
return True
try:
subprocess.call([sys.executable, "-m", "pip", "install",
"-t", PROJECT_PATH] + packages)
return True
except subprocess.CalledProcessError:
return False
def download(url, filename, overwrite=False, sha1=None):
"""
Download the given URL to the given filename. If the file exists,
it won't be downloaded unless asked to overwrite. Both, text data
like html, txt, etc. or binary data like images, audio, etc. are
acceptable.
:param url: A URL to download.
:param filename: The file to store the downloaded file to.
:param overwrite: Set to True if the file should be downloaded even if it
already exists.
:param sha1: The sha1 checksum to verify the file using.
"""
blocksize = 16 * 1024
_hash = hashlib.sha1()
if os.path.exists(filename) and not overwrite:
# Do a pass for the hash if it already exists
with open(filename, "rb") as downloaded_file:
while True:
block = downloaded_file.read(blocksize)
if not block:
break
_hash.update(block)
else:
# If it doesn't exist, or overwrite=True, find hash while downloading
response = urlopen(url)
with open(filename, 'wb') as out_file:
while True:
block = response.read(blocksize)
if not block:
break
out_file.write(block)
_hash.update(block)
return _hash.hexdigest() == sha1
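# A minimal usage sketch for download() (illustrative only): the URL is an
# assumed example and the checksum shown is the well-known sha1 of empty data.
def _example_download_usage():
    """Fetch a file into the data directory and verify its sha1."""
    target = os.path.join(data_path(), 'example.bin')
    ok = download('http://example.org/example.bin', target,
                  sha1='da39a3ee5e6b4b0d3255bfef95601890afd80709')
    if not ok:
        print('Download or checksum verification failed for', target)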
class CheckFailed(Exception):
"""
Exception thrown when a ``SetupPackage.check()`` fails.
"""
pass
class SetupPackage(object):
name = None
optional = False
pkg_names = {
"apt-get": None,
"yum": None,
"dnf": None,
"pacman": None,
"zypper": None,
"brew": None,
"port": None,
"windows_url": None
}
def check(self):
"""
Check whether the dependencies are met. Should raise a ``CheckFailed``
exception if the dependency was not found.
"""
pass
def get_install_requires(self):
"""
Return a list of Python packages that are required by the package.
pip / easy_install will attempt to download and install this
package if it is not installed.
"""
return []
def get_setup_requires(self):
"""
Return a list of Python packages that are required by the setup.py
itself. pip / easy_install will attempt to download and install this
package if it is not installed on top of the setup.py script.
"""
return []
def get_data_files(self):
"""
Perform required actions to add the data files into the directory
given by ``data_path()``.
"""
pass
def install_help_msg(self):
"""
The help message to show if the package is not installed. The help
message shown depends on whether some class variables are present.
"""
def _try_managers(*managers):
for manager in managers:
pkg_name = self.pkg_names.get(manager, None)
if pkg_name and which(manager) is not None:
pkg_note = None
if isinstance(pkg_name, (tuple, list)):
pkg_name, pkg_note = pkg_name
msg = ('Try installing {0} with `{1} install {2}`.'
.format(self.name, manager, pkg_name))
if pkg_note:
msg += ' Note: ' + pkg_note
return msg
message = ""
if sys.platform == "win32":
url = self.pkg_names.get("windows_url", None)
if url:
return ('Please check {0} for instructions to install {1}'
.format(url, self.name))
elif sys.platform == "darwin":
manager_message = _try_managers("brew", "port")
return manager_message or message
elif sys.platform.startswith("linux"):
try:
import distro
except ImportError:
setup_install(['distro'])
import distro
release = distro.id()
if release in ('debian', 'ubuntu', 'linuxmint', 'raspbian'):
manager_message = _try_managers('apt-get')
if manager_message:
return manager_message
elif release in ('centos', 'rhel', 'redhat', 'fedora',
'scientific', 'amazon', ):
manager_message = _try_managers('dnf', 'yum')
if manager_message:
return manager_message
elif release in ('sles', 'opensuse'):
manager_message = _try_managers('zypper')
if manager_message:
return manager_message
            elif release in ('arch',):
manager_message = _try_managers('pacman')
if manager_message:
return manager_message
return message
class PkgConfig(SetupPackage):
"""
This is a class for communicating with pkg-config.
"""
name = "pkg-config"
pkg_names = {
"apt-get": 'pkg-config',
"yum": None,
"dnf": None,
"pacman": None,
"zypper": None,
"brew": 'pkg-config',
"port": None,
"windows_url": None
}
def __init__(self):
if sys.platform == 'win32':
self.has_pkgconfig = False
else:
self.pkg_config = os.environ.get('PKG_CONFIG', 'pkg-config')
self.set_pkgconfig_path()
try:
with open(os.devnull) as nul:
subprocess.check_call([self.pkg_config, "--help"],
stdout=nul, stderr=nul)
self.has_pkgconfig = True
except (subprocess.CalledProcessError, OSError):
self.has_pkgconfig = False
raise DistutilsSetupError("pkg-config is not installed. "
"Please install it to continue.\n" +
self.install_help_msg())
def set_pkgconfig_path(self):
pkgconfig_path = sysconfig.get_config_var('LIBDIR')
if pkgconfig_path is None:
return
pkgconfig_path = os.path.join(pkgconfig_path, 'pkgconfig')
if not os.path.isdir(pkgconfig_path):
return
os.environ['PKG_CONFIG_PATH'] = ':'.join(
[os.environ.get('PKG_CONFIG_PATH', ""), pkgconfig_path])
def get_version(self, package):
"""
Get the version of the package from pkg-config.
"""
if not self.has_pkgconfig:
return None
try:
output = subprocess.check_output(
[self.pkg_config, package, "--modversion"],
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError:
return None
else:
output = output.decode(sys.getfilesystemencoding())
return output.strip()
# The PkgConfig class should be used through this singleton
pkg_config = PkgConfig()
class Distro(SetupPackage):
name = "distro"
def check(self):
return 'Will be installed with pip.'
def get_setup_requires(self):
try:
import distro # noqa (unused import)
return []
except ImportError:
return ['distro']
class SetupTools(SetupPackage):
name = 'setuptools'
def check(self):
return 'Will be installed with pip.'
def get_setup_requires(self):
try:
import setuptools # noqa (unused import)
return []
except ImportError:
return ['setuptools']
class PathLib(SetupPackage):
name = 'pathlib'
def check(self):
if sys.version_info < (3, 4):
return 'Backported pathlib2 will be installed with pip.'
else:
return 'Already installed in python 3.4+'
def get_install_requires(self):
if sys.version_info < (3, 4):
return ['pathlib2']
else:
return []
class AppDirs(SetupPackage):
name = 'appdirs'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
return ['appdirs']
class LibMagic(SetupPackage):
name = 'libmagic'
pkg_names = {
"apt-get": 'libmagic-dev',
"yum": 'file',
"dnf": 'file',
"pacman": None,
"zypper": None,
"brew": 'libmagic',
"port": None,
"windows_url": None
}
def check(self):
file_path = which('file')
if file_path is None:
raise CheckFailed('Needs to be installed manually.')
else:
return 'Found "file" utility at {0}.'.format(file_path)
class PythonMagic(SetupPackage):
name = 'python-magic'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
return ['python-magic']
class Six(SetupPackage):
name = 'six'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
return ['six>=1.8.0']
class ExifTool(SetupPackage):
name = 'exiftool'
pkg_names = {
"apt-get": 'exiftool',
"yum": 'perl-Image-ExifTool',
"dnf": 'perl-Image-ExifTool',
"pacman": None,
"zypper": None,
"brew": 'exiftool',
"port": 'p5-image-exiftool',
"windows_url": 'http://www.sno.phy.queensu.ca/~phil/exiftool/'
}
def check(self):
exiftool_path = which('exiftool')
if exiftool_path is None:
raise CheckFailed('Needs to be installed manually.')
else:
return 'Found at {0}.'.format(exiftool_path)
class Pillow(SetupPackage):
name = 'pillow'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
return ['pillow>=2.5.0']
class Numpy(SetupPackage):
name = 'numpy'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
return ['numpy>=1.7.2']
class Dlib(SetupPackage):
name = 'dlib'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
return ['dlib']
class ScikitImage(SetupPackage):
name = 'scikit-image'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
# For some reason some dependencies of scikit-image aren't installed
# by pip: https://github.com/scikit-image/scikit-image/issues/2155
return ['scipy', 'matplotlib', 'scikit-image>=0.12']
class MagickWand(SetupPackage):
name = 'magickwand'
pkg_names = {
"apt-get": 'libmagickwand-dev',
"yum": 'ImageMagick-devel',
"dnf": 'ImageMagick-devel',
"pacman": None,
"zypper": None,
"brew": 'imagemagick',
"port": 'imagemagick',
"windows_url": ("http://docs.wand-py.org/en/latest/guide/"
"install.html#install-imagemagick-on-windows")
}
def check(self):
# `wand` already checks for magickwand, but only when importing, not
# during installation. See https://github.com/dahlia/wand/issues/293
magick_wand = pkg_config.get_version("MagickWand")
if magick_wand is None:
raise CheckFailed('Needs to be installed manually.')
else:
return 'Found with pkg-config.'
class Wand(SetupPackage):
name = 'wand'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
return ['wand']
class PyColorName(SetupPackage):
name = 'pycolorname'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
return ['pycolorname']
class LibZBar(SetupPackage):
name = 'libzbar'
pkg_names = {
"apt-get": 'libzbar-dev',
"yum": 'zbar-devel',
"dnf": 'zbar-devel',
"pacman": None,
"zypper": None,
"brew": 'zbar',
"port": None,
"windows_url": None
}
def check(self):
libzbar = ctypes.util.find_library('zbar')
if libzbar is None:
raise CheckFailed('Needs to be installed manually.')
else:
return 'Found {0}.'.format(libzbar)
class ZBar(SetupPackage):
name = 'zbar'
def check(self):
return 'Will be installed with pip.'
def get_install_requires(self):
return ['zbar']
class JavaJRE(SetupPackage):
name = 'java'
pkg_names = {
"apt-get": 'default-jre',
"yum": 'java',
"dnf": 'java',
"pacman": None,
"zypper": None,
"brew": None,
"port": None,
"windows_url": "https://java.com/download/"
}
def check(self):
java_path = which('java')
if java_path is None:
raise CheckFailed('Needs to be installed manually.')
else:
return 'Found at {0}.'.format(java_path)
class ZXing(SetupPackage):
name = 'zxing'
def check(self):
return 'Will be downloaded from their maven repositories.'
@staticmethod
def download_jar(data_folder, path, name, ver, **kwargs):
data = {'name': name, 'ver': ver, 'path': path}
fname = os.path.join(data_folder, '{name}-{ver}.jar'.format(**data))
url = ('http://central.maven.org/maven2/{path}/{name}/{ver}/'
'{name}-{ver}.jar'.format(**data))
        # Propagate the checksum result so callers can detect a bad download.
        if not download(url, fname, **kwargs):
            return None
        return fname
def get_data_files(self):
msg = 'Unable to download "{0}" correctly.'
if not self.download_jar(
data_path(), 'com/google/zxing', 'core', '3.2.1',
sha1='2287494d4f5f9f3a9a2bb6980e3f32053721b315'):
return msg.format('zxing-core')
if not self.download_jar(
data_path(), 'com/google/zxing', 'javase', '3.2.1',
sha1='78e98099b87b4737203af1fcfb514954c4f479d9'):
return msg.format('zxing-javase')
if not self.download_jar(
data_path(), 'com/beust', 'jcommander', '1.48',
sha1='bfcb96281ea3b59d626704f74bc6d625ff51cbce'):
return msg.format('jcommander')
return 'Successfully downloaded zxing-javase, zxing-core, jcommander.'
class FFProbe(SetupPackage):
name = 'ffprobe'
pkg_names = {
"apt-get": 'libav-tools',
"yum": ('ffmpeg', 'This requires the RPMFusion repo to be enabled.'),
"dnf": ('ffmpeg', 'This requires the RPMFusion repo to be enabled.'),
"pacman": None,
"zypper": None,
"brew": 'ffmpeg',
"port": None,
"windows_url": None
}
def check(self):
ffprobe_path = which('ffprobe') or which('avprobe')
if ffprobe_path is None:
raise CheckFailed('Needs to be installed manually.')
else:
return 'Found at {0}.'.format(ffprobe_path)
| mit | 5,325,013,657,302,106,000 | 28.00692 | 78 | 0.556543 | false |
elektito/pybtracker | pybtracker/client.py | 1 | 13320 | import asyncio
import os
import struct
import logging
import random
import cmd
import argparse
from urllib.parse import urlparse
from collections import defaultdict
from ipaddress import ip_address
from datetime import datetime, timedelta
from version import __version__
class ServerError(Exception):
pass
class UdpTrackerClientProto(asyncio.DatagramProtocol):
def __init__(self, client):
self.client = client
self.received_msg = None
self.sent_msgs = {}
self.logger = self.client.logger
self.connection_lost_received = asyncio.Event()
def connection_made(self, transport):
self.transport = transport
def connection_lost(self, exc):
self.connection_lost_received.set()
def datagram_received(self, data, addr):
if len(data) < 8:
self.logger.warning('Invalid datagram received.')
return
action, tid = struct.unpack('!II', data[:8])
if tid in self.sent_msgs:
self.received_msg = (action, tid, data[8:])
self.sent_msgs[tid].set()
else:
self.logger.warning('Invalid transaction ID received.')
def error_received(self, exc):
        self.logger.info('UDP client transmission error: {}'.format(exc))
def get_tid(self):
tid = random.randint(0, 0xffffffff)
while tid in self.sent_msgs:
tid = random.randint(0, 0xffffffff)
self.sent_msgs[tid] = asyncio.Event()
return tid
async def send_msg(self, msg, tid):
n = 0
timeout = 15
for i in range(self.client.max_retransmissions):
try:
self.transport.sendto(msg)
await asyncio.wait_for(
self.sent_msgs[tid].wait(),
timeout=timeout)
del self.sent_msgs[tid]
except asyncio.TimeoutError:
if n >= self.client.max_retransmissions - 1:
del self.sent_msgs[tid]
raise TimeoutError('Tracker server timeout.')
action = int.from_bytes(msg[8:12], byteorder='big')
if action != 0: # if not CONNECT
delta = timedelta(seconds=self.client.connid_valid_period)
if self.client.connid_timestamp < datetime.now() - delta:
await self.connect()
n += 1
timeout = 15 * 2 ** n
self.logger.info(
'Request timeout. Retransmitting. '
'(try #{}, next timeout {} seconds)'.format(n, timeout))
else:
return
async def connect(self):
self.logger.info('Sending connect message.')
tid = self.get_tid()
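        # BEP 15 connect request: 64-bit protocol magic number, action 0
        # (connect) and the random 32-bit transaction id obtained above.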
msg = struct.pack('!QII', 0x41727101980, 0, tid)
await self.send_msg(msg, tid)
if self.received_msg:
action, tid, data = self.received_msg
if action == 3:
                self.logger.warning('An error was received in reply to connect: {}'
                                    .format(data.decode()))
self.client.connid = None
raise ServerError(
'An error was received in reply to connect: {}'
.format(data.decode()))
else:
self.client.callback('connected')
self.client.connid = int.from_bytes(data, byteorder='big')
self.client.connid_timestamp = datetime.now()
self.received_msg = None
else:
self.logger.info('No reply received.')
async def announce(self, infohash, num_want, downloaded, left, uploaded,
event=0, ip=0):
if not self.client.interval or not self.client.connid or \
datetime.now() > self.client.connid_timestamp + \
timedelta(seconds=self.client.connid_valid_period):
# get a connection id first
await self.connect()
if not self.client.connid:
self.logger.info('No reply to connect message.')
return
self.logger.info('Sending announce message.')
action = 1
tid = self.get_tid()
        port = self.transport.get_extra_info('sockname')[1]
key = random.randint(0, 0xffffffff)
ip = int.from_bytes(ip_address(ip).packed, byteorder='big')
msg = struct.pack('!QII20s20sQQQIIIIH', self.client.connid, action, tid,
infohash, self.client.peerid, downloaded, left,
uploaded, event, ip, key, num_want, port)
await self.send_msg(msg, tid)
if self.received_msg:
action, tid, data = self.received_msg
if action == 3:
self.logger.warning('An error was received in reply to announce: {}'
.format(data.decode()))
raise ServerError(
'An error was received in reply to announce: {}'
.format(data.decode()))
else:
if len(data) < 12:
self.logger.warning('Invalid announce reply received. Too short.')
return None
self.client.interval, leechers, seeders = struct.unpack('!III', data[:12])
self.received_msg = None
data = data[12:]
if len(data) % 6 != 0:
self.logger.warning(
'Invalid announce reply received. Invalid length.')
return None
peers = [data[i:i+6] for i in range(0, len(data), 6)]
peers = [(str(ip_address(p[:4])), int.from_bytes(p[4:], byteorder='big'))
for p in peers]
self.client.callback('announced', infohash, peers)
else:
peers = None
self.logger.info('No reply received to announce message.')
return peers
class TrackerClient:
def __init__(self,
announce_uri,
max_retransmissions=8,
loop=None):
self.logger = logging.getLogger(__name__)
scheme, netloc, _, _, _, _ = urlparse(announce_uri)
if scheme != 'udp':
raise ValueError('Tracker scheme not supported: {}'.format(scheme))
if ':' not in netloc:
self.logger.info('Port not specified in announce URI. Assuming 80.')
tracker_host, tracker_port = netloc, 80
else:
tracker_host, tracker_port = netloc.split(':')
tracker_port = int(tracker_port)
self.server_addr = tracker_host, tracker_port
self.max_retransmissions = max_retransmissions
if loop:
self.loop = loop
else:
self.loop = asyncio.get_event_loop()
self.allowed_callbacks = ['connected', 'announced']
self.connid_valid_period = 60
self.callbacks = defaultdict(list)
self.connid = None
self.connid_timestamp = None
self.interval = None
self.peerid = os.urandom(20)
def callback(self, cb, *args):
if cb not in self.allowed_callbacks:
raise ValueError('Invalid callback: {}'.format(cb))
for c in self.callbacks[cb]:
c(*args)
def add_callback(self, name, func):
if name not in self.allowed_callbacks:
            raise ValueError('Invalid callback: {}'.format(name))
self.callbacks[name].append(func)
def rm_callback(self, name, func):
if name not in self.allowed_callbacks:
            raise ValueError('Invalid callback: {}'.format(name))
self.callbacks[name].remove(func)
async def start(self):
self.transport, self.proto = await self.loop.create_datagram_endpoint(
lambda: UdpTrackerClientProto(self),
remote_addr=self.server_addr)
async def stop(self):
self.transport.close()
await self.proto.connection_lost_received.wait()
async def announce(self, infohash, downloaded, left, uploaded, event,
num_want=160):
return await self.proto.announce(
infohash, num_want, downloaded, left, uploaded, event)
async def connect(self):
return await self.proto.connect()
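# A minimal usage sketch (illustrative only): the tracker URI and infohash
# below are assumed example values; event=2 maps to "started" in ClientShell.
async def _example_announce_once():
    """Start the client, announce a torrent once and return the peers."""
    client = TrackerClient('udp://tracker.example.org:6969/announce')
    await client.start()
    try:
        # Arguments: infohash, downloaded, left, uploaded, event.
        peers = await client.announce(b'\x00' * 20, 0, 1000, 0, 2)
    finally:
        await client.stop()
    return peers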
def hex_encoded_infohash(v):
v = bytes.fromhex(v)
if len(v) != 20:
raise ValueError
return v
class NiceArgumentParser(argparse.ArgumentParser):
def error(self, message):
self.print_usage()
print('{}: error: {}'.format(self.prog, message))
raise argparse.ArgumentError(None, message)
class ClientShell(cmd.Cmd):
intro = 'BitTorrent tracker client. Type help or ? to list commands.\n'
prompt = '(btrc) '
file = None
def __init__(self, args):
super().__init__()
self.loop = asyncio.get_event_loop()
self.client = TrackerClient(args.tracker_uri)
self.loop.run_until_complete(self.client.start())
self.is_closed = False
def do_connect(self, arg):
'Obtain a connection ID from the tracker.'
self.loop.run_until_complete(self.client.connect())
if self.client.connid:
print('Connection ID:', self.client.connid)
else:
print('No connection ID.')
def do_announce(self, arg):
'Announce an event to the tracker.'
parser = NiceArgumentParser(description='Announce to tracker.')
parser.add_argument(
'infohash', type=hex_encoded_infohash,
help='The infohash of the torrent to announce in hex-encoded '
'format.')
parser.add_argument(
'downloaded', type=int,
help='Downloaded bytes to announce.')
parser.add_argument(
'left', type=int,
help='Left bytes to announce.')
parser.add_argument(
'uploaded', type=int,
help='Uploaded bytes to announce.')
parser.add_argument(
'--num-want', '-n', type=int, default=160,
            help='Maximum number of peers to request. '
'Defaults to 160.')
parser.add_argument(
'--event', '-e', default='none',
choices=['none', 'completed', 'started', 'stopped'],
help='The event to announce. Defaults to "none".')
try:
args = parser.parse_args(arg.split())
except argparse.ArgumentError:
return
args.event = [
'none',
'completed',
'started',
'stopped'
].index(args.event)
try:
ret = self.loop.run_until_complete(self.client.announce(
args.infohash,
args.downloaded,
args.left,
args.uploaded,
args.event,
args.num_want))
if ret:
print('Received {} peer(s) from the tracker:'.format(len(ret)))
for host, port in ret:
print(' {}:{}'.format(host, port))
else:
print('No peers received from the tracker.')
except ServerError as e:
print(e)
except TimeoutError:
print('Request timed out.')
def do_EOF(self, arg):
'Quit the shell.'
print()
self.close()
return True
def do_quit(self, arg):
'Quit the shell.'
self.close()
return True
def close(self):
self.loop.run_until_complete(self.client.stop())
self.loop.close()
self.is_closed = True
def setup_logging(args):
import sys
logger = logging.getLogger(__name__)
formatter = logging.Formatter(
'%(asctime) -15s - %(levelname) -8s - %(message)s')
level = {
'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL
}[args.log_level]
if args.log_to_stdout:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
handler.setLevel(level)
logger.addHandler(handler)
if args.log_file:
handler = logging.FileHandler(args.log_file)
handler.setFormatter(formatter)
handler.setLevel(level)
logger.addHandler(handler)
logger.setLevel(level)
def main():
    parser = argparse.ArgumentParser(description='UDP tracker client.')
parser.add_argument(
'tracker_uri', metavar='URI',
help='The tracker URI.')
parser.add_argument(
'--log-to-stdout', '-O', action='store_true', default=False,
help='Log to standard output.')
parser.add_argument('--log-file', '-l', help='Log to the specified file.')
parser.add_argument(
'--log-level', '-L', default='info',
choices=['debug', 'info', 'warning', 'error', 'critical'],
help='Set log level. Defaults to "info".')
parser.add_argument(
'--version', '-V', action='version',
version='pybtracker v' + __version__)
args = parser.parse_args()
setup_logging(args)
shell = ClientShell(args)
try:
shell.cmdloop()
except KeyboardInterrupt:
print()
finally:
if not shell.is_closed:
shell.close()
if __name__ == '__main__':
main()
| mit | -665,871,698,634,884,000 | 32.807107 | 90 | 0.555856 | false |
bmhatfield/Diamond | src/collectors/openstackswift/openstackswift.py | 31 | 3996 | # coding=utf-8
"""
Openstack swift collector.
#### Dependencies
* swift-dispersion-report commandline tool (for dispersion report)
  if using this, make sure swift.conf and dispersion.conf are readable by
  diamond. Also get an idea of the runtime of a swift-dispersion-report call
  and make sure the collect interval is high enough to avoid contention.
* swift commandline tool (for container_metrics)
both of these should come installed with swift
"""
import diamond.collector
from subprocess import Popen, PIPE
try:
import json
except ImportError:
import simplejson as json
class OpenstackSwiftCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(OpenstackSwiftCollector,
self).get_default_config_help()
config_help.update({
'enable_dispersion_report': 'gather swift-dispersion-report ' +
'metrics (default False)',
'enable_container_metrics': 'gather containers metrics ' +
'(# objects, bytes used, ' +
'x_timestamp. default True)',
'auth_url': 'authentication url (for enable_container_metrics)',
'account': 'swift auth account (for enable_container_metrics)',
'user': 'swift auth user (for enable_container_metrics)',
'password': 'swift auth password (for enable_container_metrics)',
'containers': 'containers on which to count number of objects, ' +
'space separated list (for enable_container_metrics)'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(OpenstackSwiftCollector, self).get_default_config()
config.update({
'path': 'openstackswift',
'enable_dispersion_report': False,
'enable_container_metrics': True,
# don't use the threaded model with this one.
# for some reason it crashes.
'interval': 1200, # by default, every 20 minutes
})
return config
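    # A minimal diamond config sketch for this collector (illustrative only;
    # every value below is an assumed example, not a required setting):
    #
    #   enabled = True
    #   auth_url = http://swift.example.org:8080/auth/v1.0
    #   account = AUTH_demo
    #   user = demo
    #   password = secret
    #   containers = container1,container2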
def collect(self):
# dispersion report. this can take easily >60s. beware!
if (self.config['enable_dispersion_report']):
p = Popen(
['swift-dispersion-report', '-j'],
stdout=PIPE,
stderr=PIPE)
stdout, stderr = p.communicate()
self.publish('dispersion.errors', len(stderr.split('\n')) - 1)
data = json.loads(stdout)
for t in ('object', 'container'):
for (k, v) in data[t].items():
self.publish('dispersion.%s.%s' % (t, k), v)
# container metrics returned by stat <container>
if(self.config['enable_container_metrics']):
account = '%s:%s' % (self.config['account'], self.config['user'])
for container in self.config['containers'].split(','):
cmd = ['swift', '-A', self.config['auth_url'],
'-U', account,
'-K', self.config['password'],
'stat', container]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
stats = {}
# stdout is some lines in 'key : val' format
for line in stdout.split('\n'):
if line:
                        line = line.split(':', 1)
stats[line[0].strip()] = line[1].strip()
key = 'container_metrics.%s.%s' % (self.config['account'],
container)
self.publish('%s.objects' % key, stats['Objects'])
self.publish('%s.bytes' % key, stats['Bytes'])
self.publish('%s.x_timestamp' % key, stats['X-Timestamp'])
| mit | 292,039,663,780,110,000 | 41.063158 | 79 | 0.543293 | false |
haad/ansible | lib/ansible/modules/network/f5/bigip_monitor_http.py | 3 | 18788 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_monitor_http
short_description: Manages F5 BIG-IP LTM http monitors
description: Manages F5 BIG-IP LTM http monitors.
version_added: "2.5"
options:
name:
description:
- Monitor name.
required: True
aliases:
- monitor
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(http)
parent on the C(Common) partition.
default: "/Common/http"
send:
description:
- The send string for the monitor call. When creating a new monitor, if
this value is not provided, the default C(GET /\r\n) will be used.
receive:
description:
- The receive string for the monitor call.
receive_disable:
description:
- This setting works like C(receive), except that the system marks the node
or pool member disabled when its response matches the C(receive_disable)
string but not C(receive). To use this setting, you must specify both
C(receive_disable) and C(receive).
ip:
description:
- IP address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'.
port:
description:
- Port address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'. Note that if specifying an IP address, a value between 1 and 65535
must be specified.
interval:
description:
- The interval specifying how frequently the monitor instance of this
template will run. If this parameter is not provided when creating
a new monitor, then the default value will be 5. This value B(must)
be less than the C(timeout) value.
timeout:
description:
- The number of seconds in which the node or service must respond to
the monitor request. If the target responds within the set time
period, it is considered up. If the target does not respond within
the set time period, it is considered down. You can change this
number to any number you want, however, it should be 3 times the
interval number of seconds plus 1 second. If this parameter is not
provided when creating a new monitor, then the default value will be 16.
time_until_up:
description:
- Specifies the amount of time in seconds after the first successful
response before a node will be marked up. A value of 0 will cause a
node to be marked up immediately after a valid response is received
from the node. If this parameter is not provided when creating
a new monitor, then the default value will be 0.
target_username:
description:
- Specifies the user name, if the monitored target requires authentication.
target_password:
description:
- Specifies the password, if the monitored target requires authentication.
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
state:
description:
- When C(present), ensures that the monitor exists.
- When C(absent), ensures the monitor is removed.
default: present
choices:
- present
- absent
version_added: 2.5
notes:
- Requires BIG-IP software version >= 12
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create HTTP Monitor
bigip_monitor_http:
state: present
ip: 10.10.10.10
server: lb.mydomain.com
user: admin
password: secret
name: my_http_monitor
delegate_to: localhost
- name: Remove HTTP Monitor
bigip_monitor_http:
state: absent
server: lb.mydomain.com
user: admin
password: secret
name: my_http_monitor
delegate_to: localhost
- name: Include a username and password in the HTTP monitor
bigip_monitor_http:
    state: present
server: lb.mydomain.com
user: admin
password: secret
name: my_http_monitor
target_username: monitor_user
target_password: monitor_pass
delegate_to: localhost
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: string
sample: http
ip:
description: The new IP of IP/port definition.
returned: changed
type: string
sample: 10.12.13.14
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
HAS_DEVEL_IMPORTS = False
try:
# Sideband repository used for dev
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fqdn_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
HAS_DEVEL_IMPORTS = True
except ImportError:
# Upstream Ansible
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fqdn_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
try:
import netaddr
HAS_NETADDR = True
except ImportError:
HAS_NETADDR = False
class Parameters(AnsibleF5Parameters):
api_map = {
'timeUntilUp': 'time_until_up',
'defaultsFrom': 'parent',
'recv': 'receive'
}
api_attributes = [
'timeUntilUp', 'defaultsFrom', 'interval', 'timeout', 'recv', 'send',
'destination', 'username', 'password'
]
returnables = [
'parent', 'send', 'receive', 'ip', 'port', 'interval', 'timeout',
'time_until_up'
]
updatables = [
'destination', 'send', 'receive', 'interval', 'timeout', 'time_until_up',
'target_username', 'target_password'
]
def _fqdn_name(self, value):
if value is not None and not value.startswith('/'):
return '/{0}/{1}'.format(self.partition, value)
return value
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
@property
def destination(self):
if self.ip is None and self.port is None:
return None
destination = '{0}:{1}'.format(self.ip, self.port)
return destination
@destination.setter
def destination(self, value):
ip, port = value.split(':')
self._values['ip'] = ip
self._values['port'] = port
@property
def interval(self):
if self._values['interval'] is None:
return None
# Per BZ617284, the BIG-IP UI does not raise a warning about this.
# So I do
        if not 1 <= int(self._values['interval']) <= 86400:
raise F5ModuleError(
"Interval value must be between 1 and 86400"
)
return int(self._values['interval'])
@property
def timeout(self):
if self._values['timeout'] is None:
return None
return int(self._values['timeout'])
@property
def ip(self):
if self._values['ip'] is None:
return None
try:
if self._values['ip'] in ['*', '0.0.0.0']:
return '*'
result = str(netaddr.IPAddress(self._values['ip']))
return result
except netaddr.core.AddrFormatError:
raise F5ModuleError(
"The provided 'ip' parameter is not an IP address."
)
@property
def port(self):
if self._values['port'] is None:
return None
elif self._values['port'] == '*':
return '*'
return int(self._values['port'])
@property
def time_until_up(self):
if self._values['time_until_up'] is None:
return None
return int(self._values['time_until_up'])
@property
def parent(self):
if self._values['parent'] is None:
return None
result = self._fqdn_name(self._values['parent'])
return result
@property
def type(self):
return 'http'
@property
def username(self):
return self._values['target_username']
@property
def password(self):
return self._values['target_password']
class Changes(Parameters):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
result = self.__default(param)
return result
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent monitor cannot be changed"
)
@property
def destination(self):
if self.want.ip is None and self.want.port is None:
return None
if self.want.port is None:
self.want.update({'port': self.have.port})
if self.want.ip is None:
self.want.update({'ip': self.have.ip})
if self.want.port in [None, '*'] and self.want.ip != '*':
raise F5ModuleError(
"Specifying an IP address requires that a port number be specified"
)
if self.want.destination != self.have.destination:
return self.want.destination
@property
def interval(self):
if self.want.timeout is not None and self.want.interval is not None:
if self.want.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.timeout is not None:
if self.have.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.interval is not None:
if self.want.interval >= self.have.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
if self.want.interval != self.have.interval:
return self.want.interval
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.have = None
self.want = Parameters(params=self.module.params)
self.changes = Changes()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = Changes(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = Changes(params=changed)
return True
return False
def _announce_deprecations(self):
warnings = []
if self.want:
warnings += self.want._values.get('__warnings', [])
if self.have:
warnings += self.have._values.get('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations()
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def create(self):
self._set_changed_options()
if self.want.timeout is None:
self.want.update({'timeout': 16})
if self.want.interval is None:
self.want.update({'interval': 5})
if self.want.time_until_up is None:
self.want.update({'time_until_up': 0})
if self.want.ip is None:
self.want.update({'ip': '*'})
if self.want.port is None:
self.want.update({'port': '*'})
if self.want.send is None:
self.want.update({'send': 'GET /\r\n'})
if self.module.check_mode:
return True
self.create_on_device()
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def absent(self):
if self.exists():
return self.remove()
return False
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the monitor.")
return True
def read_current_from_device(self):
resource = self.client.api.tm.ltm.monitor.https.http.load(
name=self.want.name,
partition=self.want.partition
)
result = resource.attrs
return Parameters(params=result)
def exists(self):
result = self.client.api.tm.ltm.monitor.https.http.exists(
name=self.want.name,
partition=self.want.partition
)
return result
def update_on_device(self):
params = self.want.api_params()
result = self.client.api.tm.ltm.monitor.https.http.load(
name=self.want.name,
partition=self.want.partition
)
result.modify(**params)
def create_on_device(self):
params = self.want.api_params()
self.client.api.tm.ltm.monitor.https.http.create(
name=self.want.name,
partition=self.want.partition,
**params
)
def remove_from_device(self):
result = self.client.api.tm.ltm.monitor.https.http.load(
name=self.want.name,
partition=self.want.partition
)
if result:
result.delete()
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
parent=dict(default='/Common/http'),
send=dict(),
receive=dict(),
receive_disable=dict(required=False),
ip=dict(),
port=dict(type='int'),
interval=dict(type='int'),
timeout=dict(type='int'),
time_until_up=dict(type='int'),
target_username=dict(),
target_password=dict(no_log=True),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
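# Standard Ansible entry point: build the module, run the manager, and map any
# F5ModuleError onto fail_json.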
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
if not HAS_F5SDK:
module.fail_json(msg="The python f5-sdk module is required")
if not HAS_NETADDR:
module.fail_json(msg="The python netaddr module is required")
    client = None
    try:
        client = F5Client(**module.params)
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        module.exit_json(**results)
    except F5ModuleError as ex:
        # Guard the cleanup: if F5Client() itself failed, 'client' was never bound.
        if client is not None:
            cleanup_tokens(client)
        module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 | 3,511,605,982,378,780,000 | 29.952224 | 92 | 0.601874 | false |
flavour/cedarbluff | modules/s3/fontmap/SazanamiMincho.py | 58 | 68515 | #!/usr/bin/env python
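# Each pair below is a half-open [start, end) range of Unicode codepoints for
# which the Sazanami Mincho font supplies glyphs, e.g. (32, 127) covers
# printable ASCII; consumers can scan these ranges to test glyph coverage.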
SazanamiMincho_map = [
(0, 1),
(32, 127),
(160, 384),
(402, 403),
(461, 477),
(501, 502),
(506, 512),
(592, 681),
(710, 712),
(713, 716),
(728, 734),
(884, 886),
(890, 891),
(894, 895),
(900, 907),
(908, 909),
(910, 930),
(931, 975),
(976, 978),
(981, 982),
(1013, 1014),
(1025, 1037),
(1038, 1104),
(1105, 1117),
(1118, 1159),
(1168, 1221),
(1223, 1225),
(1227, 1229),
(1232, 1260),
(1262, 1270),
(1272, 1274),
(7808, 7814),
(7922, 7924),
(8192, 8239),
(8240, 8263),
(8304, 8305),
(8308, 8335),
(8352, 8363),
(8364, 8365),
(8448, 8505),
(8531, 8579),
(8592, 8683),
(8704, 8946),
(8962, 8963),
(8967, 8968),
(8976, 8977),
(8978, 8979),
(8992, 8994),
(9312, 9451),
(9472, 9622),
(9632, 9712),
(9728, 9748),
(9754, 9840),
(9985, 9989),
(9990, 9994),
(9996, 10024),
(10025, 10060),
(10061, 10062),
(10063, 10067),
(10070, 10071),
(10072, 10079),
(10081, 10088),
(10102, 10133),
(10136, 10160),
(10161, 10175),
(12288, 12320),
(12336, 12337),
(12339, 12342),
(12353, 12439),
(12443, 12448),
(12449, 12543),
(12849, 12851),
(12857, 12858),
(12964, 12969),
(13059, 13060),
(13069, 13070),
(13076, 13077),
(13080, 13081),
(13090, 13092),
(13094, 13096),
(13099, 13100),
(13110, 13111),
(13115, 13116),
(13129, 13131),
(13133, 13134),
(13137, 13138),
(13143, 13144),
(13179, 13183),
(13198, 13200),
(13212, 13215),
(13217, 13218),
(13252, 13253),
(13259, 13260),
(13261, 13262),
(19968, 19974),
(19975, 19983),
(19984, 19987),
(19988, 19994),
(19998, 20000),
(20001, 20002),
(20003, 20005),
(20006, 20007),
(20008, 20009),
(20010, 20012),
(20013, 20019),
(20021, 20023),
(20024, 20026),
(20027, 20029),
(20031, 20038),
(20039, 20040),
(20043, 20044),
(20045, 20048),
(20049, 20050),
(20053, 20059),
(20060, 20064),
(20066, 20068),
(20072, 20074),
(20081, 20082),
(20083, 20086),
(20089, 20090),
(20094, 20097),
(20098, 20099),
(20101, 20103),
(20104, 20111),
(20113, 20115),
(20116, 20122),
(20123, 20131),
(20132, 20135),
(20136, 20137),
(20139, 20145),
(20147, 20148),
(20150, 20151),
(20153, 20155),
(20160, 20165),
(20166, 20168),
(20170, 20172),
(20173, 20177),
(20180, 20188),
(20189, 20198),
(20200, 20201),
(20205, 20212),
(20213, 20216),
(20219, 20228),
(20232, 20243),
(20245, 20248),
(20249, 20251),
(20252, 20254),
(20270, 20274),
(20275, 20287),
(20288, 20289),
(20290, 20292),
(20294, 20298),
(20299, 20321),
(20323, 20324),
(20329, 20331),
(20332, 20333),
(20334, 20338),
(20339, 20340),
(20341, 20352),
(20353, 20359),
(20360, 20373),
(20374, 20380),
(20381, 20386),
(20395, 20396),
(20397, 20400),
(20402, 20403),
(20405, 20408),
(20409, 20410),
(20411, 20423),
(20424, 20435),
(20436, 20437),
(20439, 20441),
(20442, 20446),
(20447, 20454),
(20462, 20465),
(20466, 20468),
(20469, 20471),
(20472, 20473),
(20474, 20475),
(20476, 20482),
(20484, 20488),
(20489, 20501),
(20502, 20512),
(20513, 20527),
(20528, 20529),
(20530, 20532),
(20533, 20535),
(20537, 20538),
(20539, 20540),
(20544, 20548),
(20549, 20555),
(20556, 20557),
(20558, 20564),
(20565, 20568),
(20569, 20571),
(20572, 20573),
(20575, 20577),
(20578, 20580),
(20581, 20584),
(20586, 20587),
(20588, 20590),
(20592, 20595),
(20596, 20599),
(20600, 20601),
(20605, 20606),
(20608, 20610),
(20611, 20615),
(20618, 20619),
(20621, 20629),
(20630, 20631),
(20632, 20637),
(20638, 20643),
(20650, 20651),
(20652, 20654),
(20655, 20657),
(20658, 20662),
(20663, 20664),
(20665, 20667),
(20669, 20671),
(20672, 20673),
(20674, 20678),
(20679, 20680),
(20681, 20683),
(20684, 20690),
(20691, 20695),
(20696, 20697),
(20698, 20699),
(20700, 20704),
(20706, 20714),
(20717, 20720),
(20721, 20723),
(20724, 20727),
(20729, 20732),
(20734, 20735),
(20736, 20741),
(20742, 20746),
(20747, 20751),
(20752, 20753),
(20754, 20755),
(20756, 20768),
(20769, 20770),
(20771, 20772),
(20775, 20777),
(20778, 20779),
(20780, 20782),
(20783, 20784),
(20785, 20790),
(20791, 20797),
(20799, 20817),
(20818, 20822),
(20823, 20825),
(20826, 20827),
(20828, 20829),
(20831, 20832),
(20834, 20835),
(20836, 20839),
(20840, 20847),
(20849, 20850),
(20853, 20857),
(20860, 20861),
(20862, 20863),
(20864, 20865),
(20866, 20871),
(20873, 20884),
(20885, 20890),
(20893, 20894),
(20896, 20903),
(20904, 20910),
(20912, 20921),
(20922, 20923),
(20924, 20928),
(20930, 20931),
(20932, 20935),
(20936, 20938),
(20939, 20942),
(20943, 20944),
(20945, 20948),
(20949, 20951),
(20952, 20953),
(20955, 20959),
(20960, 20963),
(20965, 20968),
(20969, 20971),
(20972, 20975),
(20976, 20987),
(20989, 20991),
(20992, 21001),
(21002, 21004),
(21006, 21007),
(21009, 21017),
(21021, 21022),
(21026, 21027),
(21028, 21030),
(21031, 21035),
(21038, 21039),
(21040, 21044),
(21045, 21053),
(21059, 21062),
(21063, 21064),
(21065, 21070),
(21071, 21072),
(21076, 21081),
(21082, 21085),
(21086, 21090),
(21091, 21095),
(21097, 21099),
(21102, 21110),
(21111, 21114),
(21117, 21118),
(21119, 21121),
(21122, 21124),
(21125, 21126),
(21127, 21131),
(21132, 21134),
(21137, 21145),
(21146, 21149),
(21151, 21153),
(21155, 21160),
(21161, 21166),
(21167, 21170),
(21172, 21183),
(21184, 21186),
(21187, 21194),
(21196, 21198),
(21199, 21200),
(21201, 21203),
(21204, 21210),
(21211, 21227),
(21228, 21229),
(21232, 21243),
(21246, 21252),
(21253, 21257),
(21258, 21262),
(21263, 21266),
(21267, 21268),
(21269, 21282),
(21283, 21286),
(21287, 21294),
(21295, 21300),
(21301, 21302),
(21304, 21316),
(21317, 21326),
(21329, 21333),
(21335, 21341),
(21342, 21343),
(21344, 21346),
(21347, 21348),
(21349, 21351),
(21353, 21354),
(21356, 21366),
(21367, 21370),
(21371, 21372),
(21374, 21376),
(21378, 21381),
(21383, 21385),
(21390, 21391),
(21395, 21397),
(21398, 21399),
(21400, 21403),
(21405, 21406),
(21407, 21410),
(21412, 21415),
(21416, 21420),
(21421, 21425),
(21426, 21433),
(21434, 21436),
(21437, 21438),
(21440, 21441),
(21442, 21444),
(21445, 21446),
(21448, 21456),
(21458, 21464),
(21465, 21468),
(21469, 21492),
(21493, 21497),
(21498, 21499),
(21505, 21509),
(21512, 21522),
(21523, 21524),
(21530, 21532),
(21533, 21534),
(21535, 21538),
(21542, 21552),
(21553, 21554),
(21556, 21559),
(21560, 21562),
(21563, 21567),
(21568, 21569),
(21570, 21573),
(21574, 21579),
(21581, 21584),
(21585, 21586),
(21598, 21600),
(21602, 21603),
(21604, 21605),
(21606, 21612),
(21613, 21615),
(21616, 21618),
(21619, 21624),
(21627, 21630),
(21631, 21634),
(21635, 21639),
(21640, 21651),
(21653, 21655),
(21660, 21661),
(21663, 21664),
(21665, 21667),
(21668, 21680),
(21681, 21684),
(21687, 21699),
(21700, 21701),
(21702, 21707),
(21709, 21711),
(21720, 21721),
(21728, 21731),
(21733, 21735),
(21736, 21739),
(21740, 21744),
(21746, 21747),
(21750, 21751),
(21754, 21755),
(21756, 21762),
(21764, 21770),
(21772, 21777),
(21780, 21783),
(21802, 21804),
(21806, 21808),
(21809, 21812),
(21813, 21815),
(21816, 21818),
(21819, 21823),
(21824, 21826),
(21828, 21832),
(21833, 21835),
(21836, 21838),
(21839, 21842),
(21843, 21844),
(21846, 21849),
(21850, 21855),
(21856, 21858),
(21859, 21861),
(21862, 21863),
(21883, 21885),
(21886, 21893),
(21894, 21900),
(21902, 21904),
(21905, 21909),
(21911, 21915),
(21916, 21920),
(21923, 21925),
(21927, 21935),
(21936, 21937),
(21938, 21939),
(21942, 21943),
(21951, 21952),
(21953, 21954),
(21955, 21960),
(21961, 21962),
(21963, 21965),
(21966, 21967),
(21969, 21973),
(21975, 21977),
(21978, 21981),
(21982, 21984),
(21986, 21989),
(21993, 21994),
(22006, 22008),
(22009, 22010),
(22013, 22016),
(22021, 22023),
(22024, 22027),
(22029, 22035),
(22036, 22037),
(22038, 22042),
(22043, 22044),
(22057, 22058),
(22060, 22061),
(22063, 22074),
(22075, 22078),
(22079, 22085),
(22086, 22087),
(22089, 22090),
(22091, 22097),
(22100, 22101),
(22107, 22108),
(22110, 22111),
(22112, 22117),
(22118, 22119),
(22120, 22126),
(22127, 22128),
(22129, 22131),
(22132, 22134),
(22136, 22137),
(22138, 22139),
(22144, 22145),
(22148, 22153),
(22154, 22157),
(22159, 22160),
(22164, 22166),
(22169, 22171),
(22173, 22177),
(22178, 22179),
(22181, 22186),
(22187, 22191),
(22193, 22194),
(22195, 22197),
(22198, 22200),
(22204, 22205),
(22206, 22207),
(22208, 22212),
(22213, 22214),
(22216, 22226),
(22227, 22228),
(22231, 22242),
(22243, 22249),
(22251, 22252),
(22253, 22255),
(22256, 22260),
(22262, 22264),
(22265, 22267),
(22269, 22270),
(22271, 22277),
(22279, 22286),
(22287, 22288),
(22289, 22292),
(22293, 22295),
(22296, 22297),
(22298, 22302),
(22303, 22305),
(22306, 22315),
(22316, 22321),
(22323, 22325),
(22327, 22329),
(22331, 22332),
(22333, 22337),
(22338, 22339),
(22341, 22344),
(22346, 22347),
(22348, 22355),
(22361, 22362),
(22369, 22371),
(22372, 22380),
(22381, 22386),
(22387, 22390),
(22391, 22392),
(22393, 22397),
(22398, 22400),
(22401, 22404),
(22408, 22410),
(22411, 22413),
(22419, 22422),
(22423, 22424),
(22425, 22427),
(22428, 22437),
(22439, 22443),
(22444, 22445),
(22448, 22449),
(22451, 22452),
(22456, 22457),
(22461, 22462),
(22464, 22465),
(22467, 22468),
(22470, 22473),
(22475, 22477),
(22478, 22480),
(22482, 22487),
(22492, 22498),
(22499, 22501),
(22502, 22504),
(22505, 22506),
(22509, 22510),
(22512, 22513),
(22516, 22523),
(22524, 22529),
(22530, 22535),
(22536, 22542),
(22549, 22550),
(22553, 22554),
(22555, 22556),
(22557, 22562),
(22564, 22565),
(22566, 22568),
(22570, 22571),
(22573, 22574),
(22575, 22579),
(22580, 22582),
(22585, 22587),
(22589, 22590),
(22591, 22594),
(22601, 22606),
(22607, 22611),
(22612, 22614),
(22615, 22619),
(22622, 22624),
(22625, 22627),
(22628, 22629),
(22631, 22634),
(22635, 22636),
(22640, 22641),
(22642, 22643),
(22645, 22646),
(22648, 22650),
(22652, 22653),
(22654, 22658),
(22659, 22660),
(22661, 22662),
(22663, 22667),
(22668, 22670),
(22671, 22673),
(22675, 22677),
(22678, 22680),
(22684, 22691),
(22694, 22695),
(22696, 22698),
(22699, 22700),
(22702, 22703),
(22705, 22708),
(22712, 22717),
(22718, 22719),
(22721, 22723),
(22724, 22726),
(22727, 22729),
(22730, 22731),
(22732, 22735),
(22736, 22747),
(22748, 22752),
(22753, 22755),
(22756, 22758),
(22761, 22762),
(22763, 22765),
(22766, 22772),
(22775, 22776),
(22777, 22782),
(22786, 22787),
(22789, 22791),
(22793, 22797),
(22799, 22801),
(22802, 22807),
(22808, 22814),
(22817, 22822),
(22823, 22836),
(22837, 22841),
(22846, 22848),
(22851, 22853),
(22854, 22858),
(22862, 22870),
(22871, 22876),
(22877, 22884),
(22885, 22886),
(22887, 22896),
(22898, 22903),
(22904, 22906),
(22907, 22910),
(22913, 22917),
(22922, 22927),
(22930, 22932),
(22933, 22936),
(22937, 22938),
(22939, 22940),
(22941, 22942),
(22943, 22944),
(22947, 22950),
(22951, 22953),
(22956, 22961),
(22962, 22964),
(22967, 22968),
(22969, 22973),
(22974, 22975),
(22977, 22978),
(22979, 22981),
(22982, 22983),
(22984, 22988),
(22989, 22990),
(22992, 22997),
(23001, 23003),
(23004, 23008),
(23011, 23017),
(23018, 23020),
(23022, 23024),
(23025, 23027),
(23028, 23029),
(23030, 23032),
(23035, 23036),
(23039, 23042),
(23043, 23045),
(23049, 23050),
(23052, 23055),
(23057, 23060),
(23064, 23065),
(23066, 23067),
(23068, 23069),
(23070, 23073),
(23075, 23078),
(23079, 23083),
(23085, 23086),
(23087, 23089),
(23093, 23095),
(23100, 23101),
(23104, 23106),
(23108, 23114),
(23116, 23117),
(23120, 23121),
(23125, 23126),
(23130, 23131),
(23134, 23135),
(23138, 23140),
(23141, 23144),
(23146, 23147),
(23148, 23150),
(23159, 23160),
(23162, 23164),
(23166, 23168),
(23179, 23180),
(23184, 23185),
(23186, 23188),
(23190, 23191),
(23193, 23197),
(23198, 23201),
(23202, 23203),
(23207, 23208),
(23212, 23213),
(23217, 23220),
(23221, 23222),
(23224, 23225),
(23226, 23232),
(23233, 23235),
(23236, 23237),
(23238, 23239),
(23240, 23242),
(23243, 23245),
(23247, 23249),
(23254, 23256),
(23258, 23259),
(23260, 23261),
(23264, 23266),
(23267, 23268),
(23269, 23271),
(23273, 23275),
(23278, 23279),
(23285, 23287),
(23290, 23292),
(23293, 23294),
(23296, 23298),
(23304, 23306),
(23307, 23309),
(23318, 23320),
(23321, 23322),
(23323, 23324),
(23325, 23326),
(23329, 23331),
(23333, 23334),
(23338, 23339),
(23340, 23342),
(23344, 23345),
(23346, 23347),
(23348, 23349),
(23350, 23351),
(23352, 23353),
(23358, 23359),
(23360, 23362),
(23363, 23364),
(23365, 23366),
(23371, 23373),
(23376, 23379),
(23380, 23385),
(23386, 23392),
(23395, 23399),
(23400, 23402),
(23403, 23404),
(23406, 23410),
(23411, 23412),
(23413, 23414),
(23416, 23417),
(23418, 23419),
(23420, 23426),
(23427, 23442),
(23443, 23454),
(23455, 23456),
(23458, 23463),
(23464, 23466),
(23468, 23483),
(23484, 23485),
(23487, 23496),
(23497, 23498),
(23500, 23502),
(23503, 23505),
(23506, 23509),
(23510, 23516),
(23517, 23523),
(23524, 23530),
(23531, 23533),
(23534, 23538),
(23539, 23543),
(23544, 23545),
(23546, 23547),
(23549, 23552),
(23553, 23555),
(23556, 23568),
(23569, 23570),
(23571, 23572),
(23574, 23576),
(23578, 23579),
(23582, 23585),
(23586, 23589),
(23590, 23591),
(23592, 23594),
(23595, 23599),
(23600, 23603),
(23605, 23607),
(23608, 23618),
(23621, 23623),
(23624, 23625),
(23626, 23628),
(23629, 23634),
(23635, 23636),
(23637, 23638),
(23641, 23643),
(23644, 23645),
(23646, 23647),
(23648, 23654),
(23655, 23658),
(23660, 23666),
(23668, 23671),
(23673, 23678),
(23687, 23689),
(23690, 23691),
(23692, 23693),
(23695, 23699),
(23700, 23701),
(23709, 23710),
(23711, 23716),
(23718, 23719),
(23720, 23725),
(23729, 23737),
(23738, 23741),
(23742, 23743),
(23749, 23750),
(23751, 23752),
(23753, 23754),
(23755, 23756),
(23762, 23763),
(23767, 23768),
(23769, 23770),
(23773, 23774),
(23776, 23778),
(23784, 23787),
(23789, 23795),
(23796, 23799),
(23802, 23804),
(23805, 23806),
(23809, 23810),
(23814, 23816),
(23819, 23820),
(23821, 23823),
(23825, 23827),
(23828, 23836),
(23839, 23840),
(23842, 23845),
(23846, 23848),
(23849, 23850),
(23851, 23852),
(23857, 23858),
(23860, 23861),
(23865, 23866),
(23869, 23870),
(23871, 23872),
(23874, 23876),
(23878, 23879),
(23880, 23881),
(23882, 23885),
(23886, 23887),
(23888, 23891),
(23893, 23894),
(23897, 23898),
(23900, 23901),
(23903, 23907),
(23908, 23909),
(23913, 23915),
(23916, 23918),
(23919, 23921),
(23923, 23924),
(23926, 23927),
(23929, 23931),
(23934, 23936),
(23937, 23941),
(23943, 23945),
(23946, 23949),
(23952, 23953),
(23954, 23958),
(23961, 23962),
(23963, 23964),
(23965, 23966),
(23967, 23969),
(23970, 23971),
(23975, 23976),
(23979, 23981),
(23982, 23983),
(23984, 23985),
(23986, 23987),
(23988, 23989),
(23991, 23995),
(23996, 23998),
(24003, 24004),
(24007, 24008),
(24009, 24010),
(24011, 24015),
(24016, 24017),
(24018, 24020),
(24022, 24023),
(24024, 24026),
(24027, 24028),
(24029, 24031),
(24032, 24034),
(24035, 24042),
(24043, 24044),
(24046, 24047),
(24049, 24054),
(24055, 24058),
(24059, 24060),
(24061, 24063),
(24064, 24065),
(24066, 24068),
(24070, 24072),
(24075, 24078),
(24081, 24083),
(24084, 24087),
(24088, 24092),
(24093, 24094),
(24095, 24097),
(24101, 24102),
(24104, 24105),
(24107, 24108),
(24109, 24113),
(24114, 24116),
(24117, 24121),
(24125, 24127),
(24128, 24129),
(24131, 24134),
(24135, 24136),
(24137, 24138),
(24139, 24141),
(24142, 24143),
(24144, 24146),
(24148, 24153),
(24155, 24157),
(24158, 24160),
(24161, 24165),
(24168, 24169),
(24170, 24175),
(24176, 24177),
(24178, 24183),
(24184, 24194),
(24195, 24197),
(24199, 24200),
(24202, 24204),
(24206, 24208),
(24213, 24216),
(24218, 24219),
(24220, 24221),
(24224, 24225),
(24226, 24227),
(24228, 24233),
(24234, 24238),
(24241, 24242),
(24243, 24244),
(24245, 24249),
(24253, 24256),
(24257, 24260),
(24262, 24263),
(24264, 24269),
(24270, 24279),
(24282, 24292),
(24293, 24294),
(24296, 24298),
(24299, 24301),
(24304, 24306),
(24307, 24309),
(24310, 24313),
(24314, 24317),
(24318, 24320),
(24321, 24325),
(24326, 24338),
(24339, 24346),
(24347, 24350),
(24351, 24352),
(24353, 24362),
(24363, 24370),
(24372, 24377),
(24379, 24386),
(24388, 24390),
(24391, 24393),
(24394, 24395),
(24396, 24399),
(24400, 24402),
(24403, 24405),
(24406, 24410),
(24411, 24414),
(24416, 24421),
(24422, 24424),
(24425, 24430),
(24431, 24438),
(24439, 24443),
(24444, 24454),
(24455, 24462),
(24463, 24468),
(24470, 24474),
(24476, 24479),
(24480, 24483),
(24484, 24485),
(24487, 24498),
(24499, 24501),
(24503, 24506),
(24508, 24510),
(24515, 24518),
(24519, 24522),
(24523, 24526),
(24528, 24533),
(24534, 24538),
(24540, 24543),
(24544, 24547),
(24548, 24549),
(24552, 24564),
(24565, 24567),
(24568, 24569),
(24570, 24574),
(24575, 24576),
(24583, 24584),
(24586, 24587),
(24589, 24593),
(24594, 24606),
(24607, 24610),
(24612, 24620),
(24621, 24622),
(24623, 24624),
(24625, 24626),
(24627, 24628),
(24629, 24630),
(24634, 24635),
(24640, 24644),
(24646, 24654),
(24656, 24659),
(24660, 24664),
(24665, 24667),
(24669, 24670),
(24671, 24678),
(24679, 24686),
(24687, 24690),
(24693, 24694),
(24695, 24696),
(24702, 24704),
(24705, 24711),
(24712, 24719),
(24721, 24729),
(24730, 24732),
(24733, 24737),
(24738, 24747),
(24752, 24761),
(24763, 24767),
(24770, 24771),
(24772, 24780),
(24782, 24784),
(24785, 24786),
(24787, 24790),
(24792, 24804),
(24805, 24806),
(24807, 24809),
(24816, 24830),
(24832, 24836),
(24838, 24843),
(24844, 24856),
(24857, 24861),
(24862, 24867),
(24871, 24873),
(24874, 24877),
(24880, 24882),
(24884, 24888),
(24889, 24890),
(24892, 24896),
(24897, 24899),
(24900, 24911),
(24915, 24916),
(24917, 24918),
(24920, 24923),
(24925, 24929),
(24930, 24932),
(24933, 24934),
(24935, 24937),
(24939, 24941),
(24942, 24953),
(24955, 24957),
(24958, 24965),
(24967, 24968),
(24970, 24972),
(24973, 24975),
(24976, 24981),
(24982, 24987),
(24988, 24990),
(24991, 24993),
(24996, 24998),
(24999, 25007),
(25010, 25011),
(25014, 25015),
(25016, 25019),
(25020, 25021),
(25022, 25023),
(25024, 25028),
(25030, 25041),
(25045, 25046),
(25052, 25056),
(25057, 25060),
(25061, 25064),
(25065, 25066),
(25068, 25070),
(25071, 25072),
(25074, 25075),
(25076, 25077),
(25078, 25081),
(25082, 25083),
(25084, 25090),
(25091, 25093),
(25095, 25099),
(25100, 25103),
(25104, 25111),
(25114, 25124),
(25126, 25128),
(25129, 25132),
(25134, 25137),
(25138, 25141),
(25144, 25146),
(25147, 25148),
(25149, 25150),
(25151, 25157),
(25158, 25162),
(25163, 25167),
(25168, 25175),
(25176, 25177),
(25178, 25181),
(25182, 25183),
(25184, 25185),
(25187, 25189),
(25192, 25193),
(25197, 25200),
(25201, 25202),
(25203, 25204),
(25206, 25207),
(25209, 25211),
(25212, 25217),
(25218, 25221),
(25225, 25227),
(25229, 25241),
(25243, 25245),
(25246, 25247),
(25254, 25255),
(25256, 25257),
(25259, 25261),
(25265, 25266),
(25267, 25268),
(25269, 25272),
(25273, 25280),
(25282, 25283),
(25284, 25291),
(25292, 25310),
(25312, 25314),
(25322, 25323),
(25324, 25328),
(25329, 25336),
(25340, 25344),
(25345, 25349),
(25351, 25358),
(25360, 25362),
(25363, 25364),
(25366, 25367),
(25368, 25370),
(25375, 25376),
(25383, 25388),
(25389, 25390),
(25391, 25392),
(25397, 25399),
(25401, 25403),
(25404, 25408),
(25409, 25413),
(25414, 25415),
(25417, 25425),
(25426, 25430),
(25431, 25433),
(25435, 25437),
(25445, 25450),
(25451, 25455),
(25457, 25459),
(25460, 25465),
(25466, 25470),
(25471, 25473),
(25474, 25477),
(25479, 25483),
(25484, 25485),
(25486, 25489),
(25490, 25491),
(25492, 25495),
(25496, 25500),
(25502, 25520),
(25522, 25523),
(25524, 25526),
(25531, 25532),
(25533, 25535),
(25536, 25538),
(25539, 25543),
(25544, 25546),
(25550, 25559),
(25562, 25565),
(25568, 25570),
(25571, 25572),
(25573, 25574),
(25577, 25579),
(25580, 25581),
(25582, 25583),
(25586, 25591),
(25592, 25595),
(25606, 25607),
(25609, 25611),
(25613, 25614),
(25615, 25617),
(25618, 25621),
(25622, 25625),
(25628, 25629),
(25630, 25631),
(25632, 25633),
(25634, 25635),
(25636, 25639),
(25640, 25643),
(25644, 25646),
(25647, 25649),
(25652, 25655),
(25658, 25659),
(25661, 25664),
(25666, 25667),
(25675, 25676),
(25678, 25680),
(25681, 25685),
(25688, 25689),
(25690, 25694),
(25695, 25698),
(25699, 25700),
(25703, 25704),
(25705, 25706),
(25709, 25710),
(25711, 25712),
(25715, 25717),
(25718, 25719),
(25720, 25721),
(25722, 25724),
(25725, 25726),
(25731, 25732),
(25733, 25734),
(25735, 25737),
(25743, 25748),
(25749, 25750),
(25752, 25756),
(25757, 25760),
(25761, 25762),
(25763, 25767),
(25768, 25770),
(25771, 25775),
(25776, 25777),
(25778, 25780),
(25785, 25786),
(25787, 25792),
(25793, 25795),
(25796, 25798),
(25799, 25800),
(25801, 25807),
(25808, 25811),
(25812, 25814),
(25815, 25817),
(25818, 25819),
(25824, 25832),
(25833, 25835),
(25836, 25838),
(25839, 25843),
(25844, 25848),
(25850, 25852),
(25853, 25858),
(25860, 25862),
(25864, 25867),
(25871, 25872),
(25875, 25877),
(25878, 25879),
(25880, 25882),
(25883, 25888),
(25890, 25893),
(25894, 25895),
(25897, 25901),
(25902, 25904),
(25905, 25906),
(25908, 25920),
(25923, 25924),
(25925, 25926),
(25927, 25930),
(25933, 25939),
(25940, 25946),
(25949, 25953),
(25954, 25956),
(25958, 25960),
(25963, 25965),
(25968, 25969),
(25970, 25971),
(25972, 25974),
(25975, 25977),
(25978, 25979),
(25981, 25982),
(25985, 25988),
(25989, 25990),
(25991, 25995),
(25996, 25997),
(25998, 25999),
(26000, 26003),
(26005, 26006),
(26007, 26010),
(26011, 26014),
(26015, 26018),
(26019, 26024),
(26027, 26033),
(26034, 26037),
(26039, 26040),
(26041, 26042),
(26044, 26046),
(26047, 26048),
(26049, 26055),
(26056, 26058),
(26059, 26061),
(26062, 26065),
(26066, 26067),
(26068, 26069),
(26070, 26074),
(26075, 26076),
(26079, 26083),
(26085, 26090),
(26092, 26094),
(26096, 26099),
(26100, 26102),
(26105, 26108),
(26110, 26113),
(26114, 26117),
(26118, 26123),
(26124, 26128),
(26129, 26135),
(26140, 26162),
(26163, 26168),
(26169, 26170),
(26172, 26173),
(26175, 26183),
(26185, 26189),
(26190, 26192),
(26193, 26195),
(26199, 26202),
(26203, 26211),
(26212, 26221),
(26222, 26225),
(26227, 26237),
(26238, 26242),
(26243, 26245),
(26247, 26250),
(26251, 26255),
(26256, 26259),
(26262, 26270),
(26271, 26273),
(26274, 26275),
(26276, 26277),
(26278, 26279),
(26283, 26284),
(26285, 26287),
(26289, 26291),
(26292, 26294),
(26296, 26298),
(26299, 26301),
(26302, 26309),
(26311, 26314),
(26316, 26317),
(26318, 26320),
(26324, 26325),
(26326, 26327),
(26329, 26334),
(26335, 26337),
(26342, 26343),
(26344, 26346),
(26347, 26349),
(26350, 26351),
(26352, 26353),
(26354, 26358),
(26359, 26369),
(26371, 26372),
(26373, 26374),
(26375, 26378),
(26379, 26380),
(26381, 26384),
(26387, 26392),
(26393, 26394),
(26395, 26401),
(26402, 26403),
(26406, 26409),
(26410, 26415),
(26417, 26418),
(26419, 26421),
(26422, 26425),
(26426, 26427),
(26429, 26432),
(26433, 26434),
(26437, 26442),
(26444, 26445),
(26446, 26450),
(26451, 26455),
(26457, 26458),
(26460, 26471),
(26474, 26475),
(26476, 26488),
(26491, 26493),
(26494, 26496),
(26497, 26498),
(26500, 26502),
(26503, 26504),
(26505, 26506),
(26507, 26509),
(26510, 26514),
(26515, 26516),
(26517, 26526),
(26528, 26531),
(26534, 26535),
(26537, 26538),
(26543, 26554),
(26555, 26558),
(26560, 26567),
(26568, 26571),
(26574, 26581),
(26583, 26587),
(26588, 26591),
(26593, 26595),
(26596, 26597),
(26598, 26600),
(26601, 26602),
(26604, 26605),
(26606, 26616),
(26617, 26618),
(26619, 26620),
(26622, 26624),
(26625, 26629),
(26643, 26645),
(26646, 26648),
(26649, 26650),
(26653, 26656),
(26657, 26659),
(26663, 26670),
(26671, 26677),
(26680, 26682),
(26683, 26686),
(26687, 26695),
(26696, 26697),
(26698, 26699),
(26700, 26703),
(26704, 26710),
(26711, 26714),
(26715, 26718),
(26719, 26720),
(26723, 26724),
(26727, 26728),
(26731, 26732),
(26734, 26739),
(26740, 26744),
(26745, 26749),
(26750, 26752),
(26753, 26759),
(26760, 26761),
(26765, 26766),
(26767, 26768),
(26771, 26773),
(26774, 26777),
(26778, 26782),
(26783, 26788),
(26789, 26795),
(26797, 26804),
(26805, 26807),
(26809, 26813),
(26820, 26823),
(26824, 26830),
(26831, 26843),
(26844, 26846),
(26847, 26850),
(26851, 26852),
(26853, 26854),
(26855, 26857),
(26858, 26867),
(26869, 26871),
(26873, 26878),
(26880, 26882),
(26884, 26887),
(26888, 26900),
(26902, 26904),
(26905, 26909),
(26913, 26916),
(26917, 26919),
(26920, 26921),
(26922, 26923),
(26928, 26930),
(26931, 26935),
(26936, 26938),
(26939, 26940),
(26941, 26942),
(26943, 26944),
(26946, 26947),
(26949, 26950),
(26953, 26955),
(26958, 26959),
(26963, 26966),
(26967, 26968),
(26969, 26975),
(26976, 26983),
(26984, 26998),
(26999, 27011),
(27018, 27019),
(27021, 27023),
(27025, 27027),
(27028, 27031),
(27032, 27033),
(27035, 27037),
(27040, 27042),
(27045, 27049),
(27051, 27052),
(27053, 27056),
(27057, 27059),
(27060, 27061),
(27063, 27065),
(27066, 27069),
(27070, 27072),
(27073, 27074),
(27075, 27076),
(27077, 27078),
(27079, 27081),
(27082, 27087),
(27088, 27090),
(27091, 27092),
(27094, 27098),
(27101, 27103),
(27106, 27107),
(27109, 27110),
(27111, 27113),
(27115, 27116),
(27117, 27120),
(27121, 27124),
(27125, 27126),
(27129, 27130),
(27131, 27132),
(27133, 27140),
(27141, 27142),
(27146, 27149),
(27151, 27152),
(27153, 27158),
(27159, 27160),
(27161, 27164),
(27165, 27173),
(27176, 27180),
(27182, 27183),
(27184, 27185),
(27186, 27187),
(27188, 27196),
(27197, 27200),
(27204, 27212),
(27214, 27215),
(27216, 27219),
(27221, 27223),
(27224, 27226),
(27227, 27228),
(27231, 27232),
(27233, 27235),
(27236, 27237),
(27238, 27240),
(27242, 27244),
(27249, 27252),
(27256, 27257),
(27262, 27266),
(27267, 27269),
(27270, 27272),
(27273, 27274),
(27275, 27276),
(27277, 27279),
(27280, 27282),
(27287, 27288),
(27291, 27297),
(27298, 27300),
(27301, 27302),
(27306, 27309),
(27310, 27314),
(27315, 27317),
(27320, 27321),
(27323, 27324),
(27325, 27328),
(27329, 27332),
(27334, 27335),
(27336, 27338),
(27340, 27341),
(27344, 27346),
(27347, 27351),
(27354, 27360),
(27362, 27363),
(27364, 27365),
(27367, 27369),
(27370, 27371),
(27372, 27373),
(27376, 27379),
(27386, 27390),
(27394, 27400),
(27401, 27403),
(27407, 27411),
(27414, 27416),
(27419, 27420),
(27421, 27426),
(27427, 27429),
(27431, 27433),
(27435, 27437),
(27439, 27440),
(27442, 27443),
(27445, 27452),
(27453, 27456),
(27459, 27460),
(27462, 27464),
(27465, 27467),
(27468, 27471),
(27472, 27473),
(27474, 27477),
(27478, 27479),
(27480, 27482),
(27483, 27484),
(27485, 27486),
(27487, 27493),
(27494, 27496),
(27497, 27500),
(27502, 27505),
(27507, 27510),
(27512, 27514),
(27515, 27516),
(27517, 27521),
(27522, 27527),
(27529, 27532),
(27533, 27534),
(27541, 27545),
(27547, 27548),
(27550, 27553),
(27554, 27557),
(27560, 27574),
(27575, 27585),
(27587, 27591),
(27593, 27594),
(27595, 27599),
(27602, 27605),
(27606, 27607),
(27608, 27609),
(27610, 27612),
(27615, 27616),
(27617, 27618),
(27619, 27620),
(27622, 27624),
(27627, 27629),
(27630, 27632),
(27633, 27634),
(27635, 27636),
(27639, 27640),
(27641, 27642),
(27647, 27648),
(27650, 27651),
(27652, 27654),
(27656, 27658),
(27661, 27669),
(27671, 27672),
(27673, 27674),
(27675, 27676),
(27679, 27680),
(27683, 27685),
(27686, 27689),
(27692, 27693),
(27694, 27695),
(27699, 27705),
(27706, 27708),
(27710, 27715),
(27722, 27724),
(27725, 27729),
(27730, 27731),
(27732, 27734),
(27735, 27736),
(27737, 27745),
(27746, 27747),
(27751, 27753),
(27754, 27756),
(27757, 27758),
(27759, 27761),
(27762, 27765),
(27766, 27767),
(27768, 27772),
(27773, 27775),
(27777, 27780),
(27781, 27786),
(27788, 27790),
(27792, 27793),
(27794, 27805),
(27807, 27808),
(27809, 27811),
(27819, 27820),
(27822, 27823),
(27824, 27829),
(27832, 27840),
(27841, 27843),
(27844, 27847),
(27849, 27851),
(27852, 27854),
(27855, 27864),
(27865, 27870),
(27872, 27876),
(27877, 27878),
(27879, 27885),
(27886, 27893),
(27908, 27909),
(27911, 27912),
(27914, 27917),
(27918, 27920),
(27921, 27924),
(27927, 27928),
(27929, 27932),
(27934, 27936),
(27941, 27948),
(27950, 27952),
(27953, 27956),
(27957, 27959),
(27960, 27962),
(27963, 27968),
(27969, 27970),
(27972, 27974),
(27991, 27992),
(27993, 27995),
(27996, 27997),
(27998, 28000),
(28001, 28002),
(28003, 28008),
(28009, 28011),
(28012, 28013),
(28014, 28017),
(28020, 28021),
(28023, 28026),
(28028, 28029),
(28034, 28035),
(28037, 28038),
(28039, 28041),
(28044, 28045),
(28046, 28047),
(28049, 28058),
(28059, 28061),
(28074, 28075),
(28076, 28077),
(28079, 28080),
(28082, 28083),
(28084, 28086),
(28087, 28090),
(28092, 28094),
(28095, 28097),
(28100, 28105),
(28106, 28109),
(28110, 28112),
(28113, 28115),
(28117, 28119),
(28120, 28122),
(28123, 28124),
(28125, 28131),
(28132, 28135),
(28136, 28141),
(28142, 28157),
(28160, 28161),
(28164, 28166),
(28167, 28172),
(28179, 28180),
(28181, 28182),
(28185, 28188),
(28189, 28200),
(28201, 28202),
(28203, 28208),
(28210, 28211),
(28214, 28215),
(28216, 28221),
(28222, 28223),
(28227, 28230),
(28232, 28236),
(28237, 28240),
(28241, 28245),
(28246, 28249),
(28251, 28256),
(28258, 28260),
(28263, 28265),
(28267, 28268),
(28270, 28272),
(28274, 28276),
(28278, 28279),
(28283, 28284),
(28285, 28289),
(28290, 28291),
(28300, 28302),
(28303, 28305),
(28307, 28308),
(28310, 28311),
(28312, 28314),
(28316, 28318),
(28319, 28321),
(28322, 28323),
(28325, 28326),
(28327, 28328),
(28330, 28331),
(28333, 28336),
(28337, 28340),
(28342, 28344),
(28346, 28348),
(28349, 28350),
(28351, 28358),
(28359, 28368),
(28369, 28370),
(28371, 28374),
(28381, 28383),
(28395, 28400),
(28402, 28403),
(28404, 28405),
(28407, 28410),
(28411, 28412),
(28413, 28416),
(28417, 28419),
(28420, 28421),
(28422, 28423),
(28424, 28427),
(28428, 28430),
(28431, 28432),
(28433, 28434),
(28435, 28439),
(28440, 28441),
(28442, 28444),
(28448, 28449),
(28450, 28452),
(28454, 28455),
(28457, 28462),
(28463, 28468),
(28470, 28471),
(28472, 28473),
(28475, 28477),
(28478, 28480),
(28481, 28482),
(28485, 28486),
(28495, 28496),
(28497, 28501),
(28503, 28512),
(28513, 28515),
(28516, 28517),
(28518, 28519),
(28520, 28521),
(28524, 28529),
(28532, 28533),
(28536, 28537),
(28538, 28539),
(28540, 28543),
(28544, 28549),
(28550, 28553),
(28555, 28559),
(28560, 28565),
(28566, 28568),
(28570, 28571),
(28575, 28578),
(28579, 28585),
(28586, 28587),
(28590, 28594),
(28595, 28596),
(28597, 28599),
(28601, 28602),
(28604, 28605),
(28608, 28612),
(28613, 28617),
(28618, 28619),
(28628, 28630),
(28632, 28633),
(28634, 28636),
(28638, 28642),
(28644, 28645),
(28648, 28650),
(28651, 28653),
(28654, 28658),
(28659, 28660),
(28661, 28663),
(28665, 28667),
(28668, 28671),
(28672, 28674),
(28677, 28680),
(28681, 28682),
(28683, 28684),
(28685, 28686),
(28687, 28688),
(28689, 28690),
(28693, 28694),
(28695, 28697),
(28698, 28700),
(28701, 28705),
(28707, 28708),
(28710, 28713),
(28716, 28717),
(28719, 28721),
(28722, 28723),
(28724, 28725),
(28727, 28728),
(28729, 28730),
(28732, 28733),
(28734, 28735),
(28739, 28741),
(28744, 28749),
(28750, 28751),
(28753, 28754),
(28756, 28758),
(28760, 28761),
(28765, 28767),
(28771, 28774),
(28779, 28781),
(28782, 28785),
(28789, 28791),
(28792, 28793),
(28796, 28799),
(28801, 28802),
(28805, 28807),
(28809, 28811),
(28814, 28815),
(28818, 28819),
(28820, 28826),
(28827, 28828),
(28836, 28837),
(28843, 28850),
(28851, 28853),
(28855, 28860),
(28872, 28873),
(28874, 28876),
(28879, 28880),
(28881, 28882),
(28883, 28887),
(28888, 28890),
(28892, 28894),
(28895, 28896),
(28900, 28901),
(28913, 28914),
(28921, 28923),
(28925, 28926),
(28931, 28936),
(28937, 28938),
(28939, 28941),
(28943, 28944),
(28948, 28949),
(28953, 28955),
(28956, 28957),
(28958, 28959),
(28960, 28962),
(28966, 28967),
(28971, 28972),
(28973, 28974),
(28975, 28978),
(28982, 28983),
(28984, 28985),
(28988, 28989),
(28993, 28994),
(28997, 29000),
(29001, 29005),
(29006, 29007),
(29008, 29009),
(29010, 29011),
(29013, 29016),
(29017, 29019),
(29020, 29021),
(29022, 29023),
(29024, 29025),
(29026, 29027),
(29028, 29034),
(29036, 29037),
(29038, 29039),
(29049, 29050),
(29053, 29054),
(29056, 29057),
(29060, 29062),
(29063, 29065),
(29066, 29067),
(29068, 29069),
(29071, 29072),
(29074, 29075),
(29076, 29078),
(29081, 29084),
(29087, 29089),
(29090, 29091),
(29096, 29097),
(29100, 29101),
(29103, 29108),
(29113, 29115),
(29118, 29122),
(29123, 29125),
(29128, 29130),
(29131, 29133),
(29134, 29135),
(29136, 29137),
(29138, 29144),
(29145, 29147),
(29148, 29149),
(29151, 29153),
(29157, 29160),
(29164, 29167),
(29173, 29174),
(29176, 29178),
(29179, 29181),
(29182, 29185),
(29190, 29194),
(29197, 29198),
(29200, 29201),
(29203, 29204),
(29207, 29208),
(29210, 29212),
(29213, 29214),
(29215, 29216),
(29220, 29221),
(29224, 29225),
(29226, 29230),
(29231, 29233),
(29234, 29235),
(29236, 29239),
(29240, 29252),
(29253, 29257),
(29259, 29261),
(29262, 29265),
(29266, 29268),
(29269, 29271),
(29272, 29284),
(29287, 29290),
(29291, 29292),
(29294, 29296),
(29297, 29299),
(29300, 29301),
(29303, 29306),
(29307, 29315),
(29316, 29317),
(29319, 29320),
(29321, 29322),
(29325, 29327),
(29330, 29332),
(29334, 29335),
(29339, 29340),
(29344, 29345),
(29346, 29347),
(29351, 29353),
(29356, 29360),
(29361, 29363),
(29364, 29365),
(29366, 29367),
(29369, 29370),
(29374, 29375),
(29377, 29381),
(29382, 29384),
(29385, 29386),
(29388, 29389),
(29390, 29391),
(29392, 29393),
(29394, 29395),
(29397, 29402),
(29403, 29404),
(29407, 29411),
(29413, 29414),
(29417, 29418),
(29420, 29422),
(29427, 29429),
(29431, 29439),
(29442, 29443),
(29444, 29446),
(29447, 29448),
(29450, 29452),
(29453, 29454),
(29458, 29460),
(29462, 29466),
(29467, 29472),
(29474, 29475),
(29476, 29478),
(29479, 29485),
(29486, 29488),
(29489, 29491),
(29492, 29496),
(29498, 29500),
(29501, 29504),
(29507, 29510),
(29517, 29521),
(29522, 29523),
(29526, 29529),
(29533, 29537),
(29539, 29540),
(29542, 29549),
(29550, 29555),
(29557, 29558),
(29559, 29565),
(29568, 29570),
(29571, 29576),
(29577, 29578),
(29579, 29580),
(29582, 29583),
(29584, 29585),
(29587, 29588),
(29589, 29593),
(29596, 29597),
(29598, 29601),
(29602, 29603),
(29605, 29607),
(29609, 29612),
(29613, 29614),
(29618, 29620),
(29621, 29622),
(29623, 29624),
(29625, 29626),
(29627, 29630),
(29631, 29633),
(29634, 29635),
(29637, 29639),
(29640, 29648),
(29650, 29652),
(29654, 29655),
(29657, 29658),
(29661, 29663),
(29664, 29666),
(29667, 29668),
(29669, 29672),
(29673, 29675),
(29677, 29679),
(29681, 29682),
(29684, 29686),
(29687, 29692),
(29693, 29698),
(29699, 29704),
(29705, 29707),
(29713, 29714),
(29722, 29724),
(29730, 29731),
(29732, 29735),
(29736, 29751),
(29753, 29755),
(29759, 29762),
(29763, 29765),
(29766, 29768),
(29771, 29772),
(29773, 29774),
(29777, 29779),
(29781, 29782),
(29783, 29784),
(29785, 29793),
(29794, 29797),
(29798, 29804),
(29805, 29812),
(29814, 29815),
(29822, 29823),
(29824, 29826),
(29827, 29828),
(29829, 29832),
(29833, 29834),
(29835, 29836),
(29839, 29843),
(29848, 29851),
(29852, 29853),
(29854, 29860),
(29862, 29868),
(29870, 29875),
(29877, 29878),
(29881, 29882),
(29883, 29884),
(29885, 29886),
(29887, 29888),
(29896, 29899),
(29900, 29901),
(29903, 29905),
(29907, 29909),
(29912, 29913),
(29914, 29917),
(29918, 29921),
(29922, 29925),
(29926, 29932),
(29934, 29939),
(29940, 29941),
(29942, 29945),
(29946, 29949),
(29951, 29952),
(29953, 29954),
(29955, 29959),
(29964, 29967),
(29969, 29972),
(29973, 29977),
(29978, 29979),
(29980, 29981),
(29982, 29986),
(29987, 29997),
(29999, 30004),
(30006, 30017),
(30019, 30021),
(30022, 30035),
(30036, 30037),
(30039, 30040),
(30041, 30051),
(30052, 30056),
(30057, 30060),
(30061, 30062),
(30063, 30066),
(30067, 30069),
(30070, 30080),
(30081, 30083),
(30085, 30088),
(30089, 30092),
(30094, 30102),
(30105, 30107),
(30108, 30110),
(30114, 30118),
(30123, 30124),
(30129, 30134),
(30136, 30139),
(30140, 30152),
(30154, 30155),
(30156, 30160),
(30162, 30163),
(30164, 30166),
(30167, 30170),
(30171, 30173),
(30174, 30181),
(30183, 30184),
(30185, 30186),
(30188, 30189),
(30190, 30197),
(30201, 30203),
(30204, 30205),
(30206, 30213),
(30215, 30222),
(30223, 30224),
(30226, 30228),
(30229, 30231),
(30233, 30234),
(30235, 30248),
(30249, 30250),
(30253, 30254),
(30256, 30257),
(30258, 30262),
(30264, 30269),
(30272, 30285),
(30290, 30291),
(30293, 30295),
(30296, 30298),
(30300, 30301),
(30303, 30304),
(30305, 30307),
(30308, 30310),
(30311, 30315),
(30316, 30323),
(30324, 30325),
(30326, 30327),
(30328, 30329),
(30330, 30335),
(30336, 30345),
(30347, 30351),
(30352, 30353),
(30355, 30356),
(30357, 30359),
(30361, 30369),
(30370, 30377),
(30378, 30379),
(30381, 30383),
(30384, 30385),
(30388, 30389),
(30391, 30395),
(30397, 30398),
(30399, 30400),
(30401, 30404),
(30405, 30407),
(30408, 30415),
(30418, 30419),
(30420, 30421),
(30422, 30424),
(30425, 30426),
(30427, 30429),
(30430, 30434),
(30435, 30441),
(30442, 30443),
(30444, 30445),
(30446, 30447),
(30448, 30451),
(30452, 30453),
(30454, 30455),
(30456, 30458),
(30459, 30461),
(30462, 30463),
(30464, 30466),
(30468, 30469),
(30470, 30477),
(30478, 30479),
(30482, 30483),
(30484, 30486),
(30487, 30488),
(30489, 30493),
(30494, 30497),
(30498, 30499),
(30500, 30503),
(30504, 30506),
(30509, 30512),
(30516, 30523),
(30524, 30527),
(30528, 30529),
(30530, 30531),
(30533, 30536),
(30538, 30539),
(30541, 30544),
(30546, 30547),
(30550, 30552),
(30554, 30557),
(30558, 30569),
(30570, 30573),
(30576, 30577),
(30578, 30581),
(30585, 30587),
(30589, 30593),
(30596, 30597),
(30603, 30607),
(30609, 30610),
(30612, 30615),
(30618, 30619),
(30622, 30625),
(30626, 30627),
(30629, 30630),
(30631, 30632),
(30634, 30635),
(30636, 30642),
(30643, 30644),
(30645, 30647),
(30649, 30650),
(30651, 30656),
(30659, 30660),
(30663, 30664),
(30665, 30666),
(30669, 30670),
(30673, 30675),
(30677, 30678),
(30679, 30680),
(30681, 30685),
(30686, 30689),
(30690, 30696),
(30697, 30699),
(30700, 30706),
(30707, 30709),
(30712, 30713),
(30715, 30717),
(30722, 30723),
(30725, 30727),
(30729, 30730),
(30732, 30735),
(30737, 30739),
(30740, 30742),
(30749, 30750),
(30752, 30756),
(30757, 30760),
(30765, 30767),
(30768, 30769),
(30770, 30771),
(30772, 30774),
(30775, 30776),
(30778, 30779),
(30783, 30784),
(30787, 30790),
(30791, 30793),
(30796, 30797),
(30798, 30799),
(30802, 30803),
(30812, 30815),
(30816, 30818),
(30819, 30821),
(30824, 30825),
(30826, 30829),
(30830, 30832),
(30834, 30835),
(30836, 30837),
(30842, 30843),
(30844, 30845),
(30846, 30847),
(30849, 30850),
(30854, 30856),
(30858, 30859),
(30860, 30864),
(30865, 30866),
(30867, 30870),
(30871, 30873),
(30874, 30875),
(30877, 30880),
(30881, 30882),
(30883, 30885),
(30887, 30891),
(30892, 30894),
(30895, 30900),
(30901, 30902),
(30906, 30912),
(30913, 30914),
(30917, 30925),
(30926, 30927),
(30928, 30935),
(30938, 30940),
(30943, 30946),
(30948, 30949),
(30950, 30953),
(30954, 30955),
(30956, 30957),
(30959, 30960),
(30962, 30965),
(30966, 30968),
(30970, 30972),
(30973, 30974),
(30975, 30978),
(30982, 30984),
(30988, 30989),
(30990, 30991),
(30992, 30995),
(31001, 31003),
(31004, 31005),
(31006, 31009),
(31013, 31016),
(31017, 31022),
(31024, 31026),
(31028, 31030),
(31034, 31042),
(31044, 31052),
(31055, 31058),
(31059, 31065),
(31066, 31073),
(31074, 31075),
(31077, 31078),
(31079, 31082),
(31083, 31084),
(31085, 31086),
(31090, 31091),
(31095, 31096),
(31097, 31101),
(31102, 31106),
(31108, 31110),
(31114, 31120),
(31121, 31122),
(31123, 31127),
(31128, 31129),
(31131, 31134),
(31137, 31138),
(31142, 31148),
(31150, 31154),
(31155, 31157),
(31160, 31164),
(31165, 31171),
(31172, 31173),
(31175, 31180),
(31183, 31184),
(31185, 31187),
(31188, 31191),
(31192, 31193),
(31194, 31195),
(31197, 31208),
(31209, 31214),
(31216, 31218),
(31224, 31225),
(31227, 31229),
(31232, 31233),
(31234, 31236),
(31239, 31247),
(31249, 31250),
(31252, 31254),
(31255, 31261),
(31262, 31266),
(31271, 31272),
(31275, 31276),
(31277, 31283),
(31284, 31286),
(31287, 31297),
(31298, 31306),
(31308, 31313),
(31317, 31320),
(31321, 31322),
(31324, 31326),
(31327, 31332),
(31333, 31334),
(31335, 31336),
(31337, 31340),
(31341, 31342),
(31344, 31345),
(31348, 31351),
(31352, 31355),
(31357, 31367),
(31368, 31369),
(31370, 31372),
(31376, 31385),
(31390, 31393),
(31395, 31396),
(31401, 31403),
(31404, 31405),
(31406, 31409),
(31411, 31412),
(31413, 31415),
(31417, 31421),
(31423, 31424),
(31427, 31440),
(31441, 31444),
(31445, 31446),
(31449, 31454),
(31455, 31460),
(31461, 31470),
(31471, 31474),
(31476, 31477),
(31478, 31479),
(31480, 31484),
(31485, 31488),
(31490, 31491),
(31492, 31493),
(31494, 31497),
(31498, 31500),
(31503, 31504),
(31505, 31506),
(31508, 31509),
(31512, 31514),
(31515, 31516),
(31518, 31521),
(31523, 31524),
(31525, 31538),
(31539, 31543),
(31545, 31546),
(31549, 31550),
(31551, 31554),
(31557, 31562),
(31563, 31571),
(31572, 31575),
(31581, 31582),
(31584, 31585),
(31588, 31592),
(31593, 31595),
(31596, 31606),
(31607, 31608),
(31610, 31611),
(31620, 31621),
(31622, 31624),
(31625, 31626),
(31627, 31628),
(31629, 31635),
(31636, 31650),
(31653, 31654),
(31658, 31659),
(31660, 31662),
(31663, 31667),
(31668, 31671),
(31672, 31673),
(31674, 31678),
(31680, 31683),
(31684, 31693),
(31695, 31696),
(31700, 31701),
(31702, 31704),
(31705, 31708),
(31709, 31710),
(31712, 31713),
(31716, 31719),
(31720, 31723),
(31725, 31726),
(31730, 31739),
(31740, 31741),
(31742, 31743),
(31744, 31749),
(31750, 31752),
(31753, 31754),
(31755, 31760),
(31761, 31765),
(31767, 31768),
(31769, 31770),
(31771, 31772),
(31775, 31778),
(31779, 31780),
(31781, 31785),
(31786, 31789),
(31793, 31794),
(31795, 31797),
(31798, 31803),
(31805, 31809),
(31811, 31812),
(31814, 31815),
(31818, 31819),
(31820, 31822),
(31823, 31831),
(31832, 31842),
(31843, 31846),
(31847, 31848),
(31849, 31850),
(31852, 31855),
(31856, 31857),
(31858, 31860),
(31861, 31862),
(31865, 31866),
(31868, 31871),
(31873, 31876),
(31878, 31880),
(31881, 31882),
(31883, 31884),
(31885, 31886),
(31887, 31889),
(31890, 31891),
(31892, 31894),
(31895, 31897),
(31899, 31900),
(31902, 31907),
(31908, 31913),
(31915, 31916),
(31917, 31919),
(31920, 31924),
(31926, 31928),
(31929, 31937),
(31938, 31939),
(31940, 31942),
(31943, 31947),
(31949, 31952),
(31954, 31963),
(31964, 31969),
(31970, 31971),
(31974, 31976),
(31977, 31978),
(31979, 31980),
(31983, 31984),
(31986, 31987),
(31988, 31991),
(31992, 31993),
(31994, 31996),
(31998, 31999),
(32000, 32001),
(32002, 32012),
(32013, 32014),
(32015, 32031),
(32032, 32036),
(32038, 32039),
(32042, 32052),
(32053, 32054),
(32057, 32059),
(32060, 32073),
(32075, 32082),
(32083, 32084),
(32086, 32088),
(32089, 32095),
(32097, 32100),
(32101, 32105),
(32106, 32107),
(32110, 32111),
(32112, 32116),
(32117, 32119),
(32120, 32124),
(32125, 32126),
(32127, 32128),
(32129, 32132),
(32133, 32135),
(32136, 32138),
(32139, 32142),
(32143, 32144),
(32145, 32146),
(32147, 32148),
(32150, 32152),
(32153, 32161),
(32162, 32164),
(32166, 32168),
(32170, 32188),
(32189, 32192),
(32194, 32200),
(32202, 32208),
(32209, 32211),
(32213, 32219),
(32220, 32223),
(32224, 32227),
(32228, 32231),
(32232, 32238),
(32239, 32240),
(32241, 32243),
(32244, 32247),
(32249, 32252),
(32256, 32258),
(32260, 32262),
(32264, 32268),
(32272, 32275),
(32277, 32278),
(32279, 32280),
(32283, 32292),
(32294, 32297),
(32299, 32304),
(32305, 32308),
(32309, 32312),
(32313, 32316),
(32317, 32320),
(32321, 32322),
(32323, 32328),
(32330, 32332),
(32333, 32335),
(32336, 32337),
(32338, 32339),
(32340, 32343),
(32344, 32347),
(32349, 32352),
(32353, 32355),
(32357, 32360),
(32361, 32364),
(32365, 32369),
(32371, 32372),
(32376, 32378),
(32379, 32384),
(32385, 32388),
(32390, 32395),
(32396, 32407),
(32408, 32409),
(32410, 32415),
(32566, 32567),
(32568, 32569),
(32570, 32576),
(32579, 32582),
(32583, 32584),
(32588, 32598),
(32600, 32601),
(32603, 32606),
(32607, 32610),
(32611, 32620),
(32621, 32623),
(32624, 32627),
(32629, 32630),
(32631, 32634),
(32637, 32641),
(32642, 32644),
(32645, 32649),
(32650, 32658),
(32660, 32661),
(32662, 32664),
(32666, 32667),
(32668, 32671),
(32673, 32677),
(32678, 32679),
(32680, 32683),
(32685, 32688),
(32690, 32691),
(32692, 32693),
(32694, 32695),
(32696, 32698),
(32700, 32702),
(32703, 32706),
(32707, 32708),
(32709, 32711),
(32712, 32713),
(32714, 32715),
(32716, 32717),
(32718, 32720),
(32722, 32723),
(32724, 32726),
(32731, 32732),
(32735, 32738),
(32739, 32740),
(32741, 32743),
(32744, 32746),
(32747, 32749),
(32750, 32753),
(32754, 32756),
(32761, 32770),
(32771, 32777),
(32778, 32794),
(32796, 32802),
(32804, 32805),
(32806, 32807),
(32808, 32809),
(32812, 32813),
(32814, 32815),
(32816, 32817),
(32819, 32824),
(32825, 32833),
(32836, 32837),
(32838, 32839),
(32842, 32843),
(32850, 32851),
(32854, 32855),
(32856, 32857),
(32858, 32859),
(32862, 32867),
(32868, 32869),
(32870, 32871),
(32872, 32873),
(32877, 32878),
(32879, 32888),
(32889, 32890),
(32893, 32896),
(32897, 32898),
(32900, 32906),
(32907, 32909),
(32910, 32911),
(32915, 32916),
(32918, 32919),
(32920, 32921),
(32922, 32927),
(32929, 32931),
(32933, 32936),
(32937, 32942),
(32943, 32944),
(32945, 32947),
(32948, 32949),
(32952, 32955),
(32963, 32965),
(32966, 32967),
(32968, 32969),
(32972, 32976),
(32978, 32979),
(32980, 32988),
(32989, 32991),
(32992, 32994),
(32996, 32998),
(33005, 33013),
(33014, 33015),
(33016, 33019),
(33020, 33023),
(33026, 33028),
(33029, 33036),
(33046, 33049),
(33050, 33053),
(33054, 33055),
(33056, 33057),
(33059, 33061),
(33063, 33064),
(33065, 33066),
(33068, 33069),
(33071, 33074),
(33075, 33076),
(33077, 33078),
(33081, 33083),
(33084, 33085),
(33086, 33087),
(33093, 33096),
(33098, 33101),
(33102, 33103),
(33104, 33110),
(33111, 33112),
(33119, 33122),
(33125, 33130),
(33131, 33132),
(33133, 33138),
(33140, 33141),
(33143, 33147),
(33151, 33159),
(33160, 33161),
(33162, 33164),
(33166, 33169),
(33171, 33172),
(33173, 33175),
(33176, 33177),
(33178, 33183),
(33184, 33185),
(33186, 33189),
(33192, 33194),
(33198, 33199),
(33200, 33201),
(33202, 33206),
(33208, 33209),
(33210, 33212),
(33213, 33217),
(33218, 33220),
(33221, 33223),
(33224, 33228),
(33229, 33232),
(33233, 33234),
(33235, 33236),
(33237, 33238),
(33239, 33244),
(33245, 33250),
(33251, 33254),
(33255, 33257),
(33258, 33262),
(33264, 33271),
(33272, 33284),
(33285, 33286),
(33287, 33291),
(33292, 33297),
(33298, 33301),
(33302, 33312),
(33313, 33315),
(33320, 33325),
(33326, 33327),
(33330, 33339),
(33344, 33345),
(33347, 33352),
(33355, 33356),
(33358, 33360),
(33361, 33362),
(33366, 33367),
(33368, 33371),
(33372, 33374),
(33375, 33377),
(33378, 33381),
(33382, 33385),
(33386, 33388),
(33389, 33392),
(33393, 33395),
(33396, 33397),
(33398, 33401),
(33403, 33404),
(33405, 33410),
(33411, 33413),
(33415, 33416),
(33417, 33420),
(33421, 33423),
(33425, 33427),
(33428, 33429),
(33430, 33431),
(33432, 33436),
(33437, 33438),
(33439, 33442),
(33443, 33461),
(33463, 33472),
(33477, 33479),
(33488, 33494),
(33495, 33496),
(33497, 33501),
(33502, 33513),
(33514, 33516),
(33517, 33518),
(33519, 33520),
(33521, 33522),
(33523, 33525),
(33526, 33528),
(33529, 33532),
(33533, 33535),
(33536, 33548),
(33550, 33551),
(33558, 33561),
(33563, 33568),
(33569, 33572),
(33576, 33577),
(33579, 33595),
(33596, 33598),
(33600, 33601),
(33602, 33606),
(33607, 33608),
(33609, 33611),
(33613, 33625),
(33634, 33635),
(33648, 33649),
(33651, 33652),
(33653, 33654),
(33655, 33657),
(33659, 33662),
(33663, 33665),
(33666, 33667),
(33668, 33672),
(33673, 33675),
(33677, 33679),
(33682, 33687),
(33688, 33697),
(33698, 33699),
(33702, 33710),
(33713, 33714),
(33717, 33718),
(33725, 33730),
(33733, 33734),
(33735, 33736),
(33737, 33739),
(33740, 33741),
(33742, 33746),
(33747, 33749),
(33750, 33751),
(33752, 33753),
(33756, 33758),
(33759, 33761),
(33768, 33772),
(33775, 33779),
(33780, 33781),
(33782, 33786),
(33787, 33790),
(33793, 33794),
(33795, 33797),
(33798, 33800),
(33802, 33808),
(33809, 33810),
(33811, 33812),
(33813, 33814),
(33817, 33818),
(33824, 33825),
(33826, 33827),
(33833, 33835),
(33836, 33837),
(33839, 33840),
(33841, 33842),
(33845, 33846),
(33848, 33850),
(33852, 33854),
(33861, 33867),
(33869, 33872),
(33873, 33875),
(33878, 33885),
(33888, 33896),
(33897, 33906),
(33907, 33915),
(33916, 33918),
(33921, 33923),
(33924, 33926),
(33931, 33932),
(33936, 33937),
(33938, 33942),
(33945, 33946),
(33948, 33949),
(33950, 33952),
(33953, 33954),
(33958, 33959),
(33960, 33963),
(33965, 33966),
(33967, 33968),
(33969, 33971),
(33972, 33973),
(33976, 33987),
(33988, 33989),
(33990, 33998),
(33999, 34002),
(34003, 34004),
(34006, 34007),
(34009, 34011),
(34012, 34013),
(34023, 34024),
(34026, 34027),
(34028, 34029),
(34030, 34035),
(34036, 34037),
(34039, 34040),
(34042, 34046),
(34047, 34049),
(34050, 34052),
(34054, 34056),
(34060, 34061),
(34062, 34063),
(34064, 34066),
(34067, 34070),
(34071, 34073),
(34074, 34075),
(34076, 34077),
(34078, 34080),
(34081, 34088),
(34090, 34094),
(34095, 34096),
(34098, 34103),
(34109, 34110),
(34111, 34114),
(34115, 34116),
(34118, 34119),
(34120, 34124),
(34126, 34132),
(34133, 34139),
(34140, 34149),
(34152, 34156),
(34157, 34158),
(34159, 34160),
(34167, 34168),
(34169, 34172),
(34173, 34178),
(34180, 34189),
(34191, 34194),
(34195, 34197),
(34199, 34202),
(34203, 34206),
(34207, 34209),
(34210, 34211),
(34212, 34225),
(34228, 34229),
(34230, 34235),
(34236, 34240),
(34241, 34243),
(34247, 34248),
(34249, 34252),
(34253, 34257),
(34261, 34262),
(34264, 34265),
(34266, 34267),
(34268, 34270),
(34271, 34273),
(34276, 34279),
(34280, 34283),
(34285, 34286),
(34291, 34292),
(34294, 34296),
(34297, 34301),
(34302, 34305),
(34306, 34307),
(34308, 34312),
(34314, 34316),
(34317, 34319),
(34320, 34324),
(34326, 34332),
(34334, 34335),
(34337, 34339),
(34343, 34344),
(34345, 34346),
(34349, 34350),
(34351, 34353),
(34358, 34359),
(34360, 34361),
(34362, 34363),
(34364, 34366),
(34367, 34371),
(34374, 34375),
(34381, 34383),
(34384, 34385),
(34386, 34395),
(34396, 34405),
(34407, 34408),
(34409, 34410),
(34411, 34413),
(34415, 34416),
(34417, 34418),
(34421, 34424),
(34425, 34428),
(34440, 34441),
(34442, 34446),
(34449, 34450),
(34451, 34452),
(34453, 34455),
(34456, 34457),
(34458, 34459),
(34460, 34461),
(34465, 34466),
(34467, 34469),
(34470, 34476),
(34477, 34478),
(34479, 34482),
(34483, 34490),
(34495, 34498),
(34499, 34504),
(34505, 34506),
(34507, 34508),
(34509, 34511),
(34513, 34515),
(34516, 34518),
(34519, 34520),
(34521, 34525),
(34526, 34529),
(34531, 34534),
(34535, 34536),
(34537, 34538),
(34540, 34544),
(34552, 34559),
(34560, 34561),
(34562, 34572),
(34573, 34581),
(34584, 34587),
(34588, 34589),
(34590, 34592),
(34593, 34594),
(34595, 34596),
(34597, 34598),
(34600, 34602),
(34606, 34608),
(34609, 34611),
(34612, 34613),
(34615, 34616),
(34617, 34625),
(34627, 34628),
(34629, 34630),
(34633, 34634),
(34635, 34639),
(34643, 34644),
(34645, 34646),
(34647, 34650),
(34653, 34654),
(34655, 34658),
(34659, 34663),
(34664, 34665),
(34666, 34667),
(34670, 34672),
(34673, 34675),
(34676, 34677),
(34678, 34679),
(34680, 34681),
(34683, 34684),
(34687, 34688),
(34690, 34698),
(34699, 34702),
(34704, 34705),
(34707, 34708),
(34709, 34710),
(34711, 34714),
(34718, 34721),
(34722, 34724),
(34727, 34728),
(34731, 34736),
(34737, 34738),
(34739, 34740),
(34741, 34742),
(34746, 34748),
(34749, 34754),
(34756, 34757),
(34758, 34764),
(34766, 34767),
(34768, 34769),
(34770, 34771),
(34773, 34775),
(34777, 34779),
(34780, 34781),
(34783, 34785),
(34786, 34789),
(34794, 34796),
(34797, 34798),
(34799, 34800),
(34801, 34804),
(34806, 34812),
(34814, 34816),
(34817, 34818),
(34819, 34820),
(34821, 34824),
(34825, 34828),
(34829, 34839),
(34840, 34845),
(34846, 34848),
(34849, 34852),
(34855, 34857),
(34861, 34863),
(34864, 34867),
(34869, 34871),
(34873, 34877),
(34880, 34887),
(34888, 34895),
(34897, 34900),
(34901, 34917),
(34920, 34922),
(34923, 34924),
(34928, 34931),
(34933, 34934),
(34935, 34936),
(34937, 34938),
(34939, 34940),
(34941, 34947),
(34952, 34953),
(34955, 34956),
(34957, 34958),
(34962, 34963),
(34966, 34973),
(34974, 34977),
(34978, 34979),
(34980, 34981),
(34984, 34985),
(34986, 34988),
(34990, 34991),
(34992, 34994),
(34996, 34998),
(34999, 35000),
(35002, 35003),
(35005, 35014),
(35018, 35024),
(35025, 35030),
(35032, 35034),
(35035, 35040),
(35041, 35042),
(35047, 35049),
(35055, 35062),
(35063, 35066),
(35068, 35071),
(35073, 35075),
(35076, 35077),
(35078, 35080),
(35082, 35083),
(35084, 35089),
(35090, 35092),
(35093, 35095),
(35096, 35099),
(35100, 35103),
(35104, 35105),
(35109, 35113),
(35114, 35116),
(35120, 35123),
(35125, 35127),
(35128, 35132),
(35134, 35135),
(35136, 35143),
(35145, 35146),
(35148, 35150),
(35151, 35152),
(35154, 35155),
(35158, 35160),
(35162, 35165),
(35166, 35173),
(35174, 35175),
(35178, 35180),
(35181, 35185),
(35186, 35190),
(35191, 35192),
(35194, 35200),
(35201, 35202),
(35203, 35204),
(35206, 35212),
(35213, 35214),
(35215, 35217),
(35219, 35225),
(35226, 35229),
(35231, 35234),
(35237, 35240),
(35241, 35243),
(35244, 35245),
(35247, 35249),
(35250, 35256),
(35258, 35259),
(35260, 35262),
(35263, 35265),
(35282, 35283),
(35284, 35289),
(35290, 35291),
(35292, 35294),
(35299, 35300),
(35301, 35304),
(35305, 35306),
(35307, 35308),
(35309, 35310),
(35313, 35314),
(35315, 35317),
(35318, 35319),
(35320, 35322),
(35325, 35326),
(35327, 35329),
(35330, 35334),
(35335, 35337),
(35338, 35339),
(35340, 35341),
(35342, 35353),
(35355, 35356),
(35357, 35361),
(35362, 35367),
(35370, 35374),
(35375, 35376),
(35377, 35378),
(35379, 35384),
(35386, 35391),
(35392, 35394),
(35395, 35396),
(35397, 35402),
(35405, 35407),
(35408, 35417),
(35419, 35423),
(35424, 35428),
(35429, 35432),
(35433, 35434),
(35435, 35439),
(35440, 35444),
(35445, 35448),
(35449, 35453),
(35454, 35457),
(35458, 35464),
(35465, 35466),
(35467, 35470),
(35471, 35476),
(35477, 35483),
(35486, 35490),
(35491, 35498),
(35500, 35505),
(35506, 35508),
(35510, 35512),
(35513, 35514),
(35515, 35517),
(35518, 35520),
(35522, 35525),
(35526, 35534),
(35535, 35536),
(35537, 35544),
(35546, 35555),
(35556, 35557),
(35558, 35560),
(35563, 35567),
(35568, 35570),
(35571, 35577),
(35578, 35579),
(35580, 35581),
(35582, 35587),
(35588, 35592),
(35594, 35597),
(35598, 35599),
(35600, 35602),
(35604, 35605),
(35606, 35608),
(35609, 35618),
(35622, 35623),
(35624, 35625),
(35627, 35630),
(35632, 35633),
(35635, 35636),
(35639, 35640),
(35641, 35642),
(35644, 35645),
(35646, 35647),
(35649, 35655),
(35656, 35658),
(35660, 35664),
(35666, 35669),
(35670, 35671),
(35672, 35677),
(35678, 35680),
(35683, 35684),
(35686, 35687),
(35691, 35694),
(35695, 35699),
(35700, 35701),
(35702, 35706),
(35708, 35714),
(35715, 35718),
(35722, 35729),
(35730, 35735),
(35737, 35739),
(35740, 35741),
(35742, 35744),
(35895, 35899),
(35901, 35904),
(35905, 35906),
(35909, 35917),
(35918, 35922),
(35923, 35926),
(35927, 35932),
(35933, 35934),
(35937, 35941),
(35942, 35943),
(35944, 35950),
(35955, 35956),
(35957, 35959),
(35960, 35965),
(35966, 35967),
(35970, 35971),
(35973, 35976),
(35977, 35983),
(35984, 35985),
(35986, 35989),
(35992, 35994),
(35995, 35999),
(36000, 36003),
(36004, 36005),
(36007, 36017),
(36018, 36021),
(36022, 36030),
(36031, 36044),
(36045, 36048),
(36049, 36050),
(36051, 36052),
(36053, 36055),
(36057, 36063),
(36064, 36069),
(36070, 36071),
(36072, 36073),
(36074, 36075),
(36076, 36078),
(36079, 36081),
(36082, 36083),
(36084, 36086),
(36087, 36089),
(36090, 36096),
(36097, 36098),
(36099, 36102),
(36103, 36108),
(36109, 36110),
(36111, 36113),
(36114, 36117),
(36118, 36120),
(36123, 36124),
(36196, 36200),
(36201, 36202),
(36203, 36207),
(36208, 36210),
(36211, 36213),
(36214, 36216),
(36223, 36224),
(36225, 36227),
(36228, 36230),
(36232, 36233),
(36234, 36235),
(36237, 36238),
(36240, 36242),
(36245, 36246),
(36249, 36250),
(36254, 36257),
(36259, 36260),
(36262, 36263),
(36264, 36265),
(36267, 36269),
(36271, 36272),
(36274, 36276),
(36277, 36278),
(36279, 36280),
(36281, 36285),
(36286, 36287),
(36288, 36289),
(36290, 36291),
(36293, 36297),
(36298, 36301),
(36302, 36304),
(36305, 36306),
(36308, 36312),
(36313, 36316),
(36317, 36318),
(36319, 36320),
(36321, 36322),
(36323, 36326),
(36327, 36329),
(36330, 36333),
(36335, 36342),
(36348, 36350),
(36351, 36352),
(36353, 36354),
(36356, 36359),
(36360, 36364),
(36367, 36370),
(36372, 36373),
(36374, 36375),
(36381, 36388),
(36390, 36392),
(36394, 36395),
(36400, 36402),
(36403, 36410),
(36413, 36414),
(36416, 36419),
(36420, 36421),
(36423, 36433),
(36436, 36438),
(36441, 36442),
(36443, 36453),
(36457, 36458),
(36460, 36462),
(36463, 36467),
(36468, 36469),
(36470, 36471),
(36473, 36477),
(36481, 36486),
(36487, 36488),
(36489, 36492),
(36493, 36494),
(36496, 36502),
(36505, 36508),
(36509, 36511),
(36513, 36515),
(36519, 36520),
(36521, 36530),
(36531, 36532),
(36533, 36534),
(36538, 36540),
(36542, 36543),
(36544, 36546),
(36547, 36553),
(36554, 36558),
(36559, 36560),
(36561, 36563),
(36564, 36565),
(36571, 36573),
(36575, 36576),
(36578, 36580),
(36584, 36585),
(36587, 36588),
(36589, 36591),
(36592, 36594),
(36599, 36607),
(36608, 36609),
(36610, 36612),
(36613, 36614),
(36615, 36619),
(36620, 36621),
(36623, 36625),
(36626, 36634),
(36635, 36642),
(36643, 36644),
(36645, 36651),
(36652, 36656),
(36659, 36668),
(36670, 36680),
(36681, 36682),
(36684, 36688),
(36689, 36694),
(36695, 36697),
(36700, 36704),
(36705, 36710),
(36763, 36770),
(36771, 36777),
(36781, 36787),
(36789, 36793),
(36794, 36797),
(36798, 36803),
(36804, 36807),
(36810, 36812),
(36813, 36815),
(36816, 36822),
(36826, 36827),
(36832, 36833),
(36834, 36839),
(36840, 36844),
(36845, 36850),
(36852, 36860),
(36861, 36863),
(36864, 36871),
(36872, 36873),
(36875, 36882),
(36883, 36892),
(36893, 36900),
(36903, 36907),
(36908, 36912),
(36913, 36922),
(36924, 36925),
(36926, 36928),
(36929, 36934),
(36935, 36936),
(36937, 36951),
(36952, 36954),
(36955, 36959),
(36960, 36964),
(36965, 36970),
(36972, 36977),
(36978, 36979),
(36980, 36987),
(36988, 36990),
(36991, 36998),
(36999, 37005),
(37006, 37010),
(37013, 37014),
(37015, 37018),
(37019, 37020),
(37024, 37028),
(37029, 37031),
(37032, 37033),
(37034, 37035),
(37039, 37047),
(37048, 37049),
(37053, 37055),
(37057, 37058),
(37059, 37062),
(37063, 37065),
(37066, 37067),
(37068, 37069),
(37070, 37071),
(37074, 37075),
(37077, 37078),
(37079, 37082),
(37083, 37088),
(37089, 37091),
(37092, 37094),
(37096, 37097),
(37099, 37100),
(37101, 37102),
(37103, 37105),
(37108, 37112),
(37117, 37121),
(37122, 37123),
(37124, 37127),
(37128, 37129),
(37133, 37134),
(37136, 37137),
(37138, 37139),
(37140, 37147),
(37148, 37149),
(37150, 37151),
(37152, 37153),
(37154, 37156),
(37157, 37158),
(37159, 37160),
(37161, 37162),
(37165, 37171),
(37172, 37173),
(37174, 37176),
(37177, 37179),
(37180, 37182),
(37187, 37188),
(37191, 37200),
(37202, 37205),
(37206, 37212),
(37217, 37222),
(37223, 37224),
(37225, 37227),
(37228, 37230),
(37234, 37238),
(37239, 37244),
(37249, 37252),
(37253, 37256),
(37257, 37260),
(37261, 37263),
(37264, 37270),
(37271, 37273),
(37276, 37277),
(37278, 37279),
(37281, 37283),
(37284, 37285),
(37286, 37287),
(37288, 37289),
(37290, 37303),
(37304, 37305),
(37306, 37310),
(37311, 37316),
(37317, 37322),
(37323, 37330),
(37331, 37333),
(37334, 37344),
(37345, 37346),
(37347, 37352),
(37353, 37355),
(37356, 37362),
(37365, 37368),
(37369, 37370),
(37371, 37374),
(37375, 37378),
(37380, 37384),
(37385, 37387),
(37388, 37391),
(37392, 37399),
(37400, 37401),
(37404, 37407),
(37411, 37415),
(37416, 37418),
(37420, 37421),
(37422, 37425),
(37427, 37435),
(37436, 37437),
(37438, 37441),
(37442, 37452),
(37453, 37458),
(37463, 37471),
(37472, 37475),
(37476, 37482),
(37486, 37490),
(37493, 37498),
(37499, 37505),
(37507, 37508),
(37509, 37510),
(37512, 37515),
(37517, 37519),
(37521, 37524),
(37525, 37533),
(37535, 37537),
(37540, 37542),
(37543, 37545),
(37547, 37548),
(37549, 37550),
(37551, 37552),
(37554, 37555),
(37558, 37566),
(37567, 37572),
(37573, 37577),
(37579, 37585),
(37586, 37588),
(37589, 37590),
(37591, 37594),
(37596, 37598),
(37599, 37602),
(37603, 37606),
(37607, 37611),
(37612, 37615),
(37616, 37617),
(37618, 37620),
(37624, 37629),
(37631, 37633),
(37634, 37635),
(37638, 37639),
(37640, 37641),
(37645, 37646),
(37647, 37650),
(37652, 37654),
(37656, 37659),
(37660, 37677),
(37678, 37680),
(37682, 37688),
(37690, 37692),
(37700, 37701),
(37703, 37706),
(37707, 37708),
(37709, 37710),
(37712, 37715),
(37716, 37721),
(37722, 37725),
(37726, 37727),
(37728, 37729),
(37732, 37734),
(37735, 37736),
(37737, 37739),
(37740, 37746),
(37747, 37751),
(37754, 37755),
(37756, 37763),
(37768, 37769),
(37770, 37774),
(37775, 37776),
(37778, 37779),
(37780, 37785),
(37786, 37788),
(37790, 37791),
(37793, 37794),
(37795, 37797),
(37798, 37802),
(37803, 37807),
(37808, 37809),
(37812, 37815),
(37817, 37819),
(37825, 37826),
(37827, 37838),
(37840, 37842),
(37843, 37844),
(37846, 37850),
(37852, 37856),
(37857, 37859),
(37860, 37865),
(37879, 37884),
(37885, 37886),
(37889, 37893),
(37895, 37898),
(37901, 37905),
(37907, 37915),
(37919, 37920),
(37921, 37922),
(37931, 37932),
(37934, 37936),
(37937, 37943),
(37944, 37945),
(37946, 37948),
(37949, 37950),
(37951, 37952),
(37953, 37954),
(37955, 37958),
(37960, 37961),
(37962, 37963),
(37964, 37965),
(37969, 37972),
(37973, 37974),
(37977, 37981),
(37982, 37988),
(37992, 37993),
(37994, 37996),
(37997, 38003),
(38005, 38006),
(38007, 38008),
(38012, 38016),
(38017, 38018),
(38019, 38021),
(38263, 38266),
(38270, 38271),
(38272, 38273),
(38274, 38277),
(38279, 38288),
(38289, 38293),
(38294, 38295),
(38296, 38298),
(38301, 38314),
(38315, 38318),
(38322, 38323),
(38324, 38325),
(38326, 38327),
(38329, 38336),
(38339, 38340),
(38342, 38350),
(38352, 38359),
(38360, 38363),
(38364, 38371),
(38372, 38375),
(38428, 38431),
(38433, 38435),
(38436, 38439),
(38440, 38441),
(38442, 38443),
(38444, 38445),
(38446, 38448),
(38449, 38452),
(38455, 38462),
(38463, 38467),
(38468, 38469),
(38475, 38478),
(38479, 38481),
(38482, 38483),
(38484, 38485),
(38486, 38489),
(38491, 38496),
(38497, 38503),
(38506, 38507),
(38508, 38509),
(38510, 38511),
(38512, 38513),
(38514, 38521),
(38522, 38528),
(38529, 38535),
(38536, 38540),
(38541, 38544),
(38545, 38546),
(38548, 38558),
(38559, 38561),
(38563, 38571),
(38574, 38581),
(38582, 38589),
(38592, 38594),
(38596, 38600),
(38601, 38607),
(38609, 38611),
(38613, 38615),
(38616, 38624),
(38626, 38628),
(38632, 38636),
(38639, 38643),
(38646, 38648),
(38649, 38652),
(38656, 38657),
(38658, 38667),
(38669, 38672),
(38673, 38674),
(38675, 38676),
(38678, 38679),
(38681, 38687),
(38689, 38693),
(38695, 38697),
(38698, 38699),
(38704, 38708),
(38712, 38714),
(38715, 38716),
(38717, 38719),
(38721, 38725),
(38726, 38727),
(38728, 38731),
(38733, 38736),
(38737, 38739),
(38741, 38749),
(38750, 38751),
(38752, 38757),
(38758, 38764),
(38765, 38767),
(38769, 38770),
(38771, 38773),
(38774, 38782),
(38783, 38786),
(38788, 38791),
(38793, 38794),
(38795, 38796),
(38797, 38798),
(38799, 38801),
(38805, 38811),
(38812, 38813),
(38814, 38817),
(38818, 38820),
(38822, 38823),
(38824, 38825),
(38827, 38831),
(38833, 38839),
(38840, 38843),
(38844, 38845),
(38846, 38848),
(38849, 38850),
(38851, 38863),
(38864, 38866),
(38867, 38869),
(38871, 38874),
(38875, 38879),
(38880, 38882),
(38884, 38885),
(38893, 38896),
(38897, 38905),
(38906, 38908),
(38911, 38912),
(38913, 38916),
(38917, 38921),
(38922, 38923),
(38924, 38933),
(38934, 38939),
(38940, 38941),
(38942, 38943),
(38944, 38946),
(38947, 38951),
(38955, 38961),
(38962, 38966),
(38967, 38969),
(38971, 38975),
(38980, 38981),
(38982, 38984),
(38986, 38992),
(38993, 39004),
(39006, 39007),
(39010, 39012),
(39013, 39016),
(39018, 39021),
(39023, 39026),
(39027, 39029),
(39080, 39081),
(39082, 39084),
(39085, 39090),
(39092, 39093),
(39094, 39097),
(39098, 39100),
(39103, 39104),
(39106, 39111),
(39112, 39113),
(39116, 39117),
(39131, 39133),
(39135, 39136),
(39137, 39140),
(39141, 39144),
(39145, 39148),
(39149, 39152),
(39154, 39157),
(39158, 39159),
(39164, 39167),
(39170, 39172),
(39173, 39174),
(39175, 39179),
(39180, 39181),
(39184, 39193),
(39194, 39203),
(39204, 39205),
(39206, 39209),
(39211, 39213),
(39214, 39215),
(39217, 39222),
(39225, 39231),
(39232, 39235),
(39237, 39242),
(39243, 39247),
(39248, 39251),
(39252, 39254),
(39255, 39258),
(39259, 39261),
(39262, 39265),
(39318, 39322),
(39323, 39324),
(39325, 39328),
(39333, 39335),
(39336, 39337),
(39340, 39343),
(39344, 39350),
(39353, 39355),
(39356, 39358),
(39359, 39360),
(39361, 39362),
(39363, 39367),
(39368, 39370),
(39376, 39382),
(39384, 39392),
(39394, 39395),
(39399, 39400),
(39402, 39407),
(39408, 39411),
(39412, 39414),
(39416, 39418),
(39419, 39420),
(39421, 39424),
(39425, 39430),
(39435, 39437),
(39438, 39444),
(39446, 39447),
(39449, 39450),
(39454, 39455),
(39456, 39457),
(39458, 39461),
(39463, 39465),
(39467, 39468),
(39469, 39471),
(39472, 39473),
(39475, 39476),
(39477, 39481),
(39486, 39487),
(39488, 39494),
(39495, 39496),
(39498, 39503),
(39505, 39506),
(39508, 39512),
(39514, 39516),
(39517, 39518),
(39519, 39520),
(39522, 39523),
(39524, 39526),
(39529, 39532),
(39592, 39593),
(39594, 39595),
(39596, 39601),
(39602, 39603),
(39604, 39607),
(39608, 39610),
(39611, 39613),
(39614, 39618),
(39619, 39621),
(39622, 39623),
(39624, 39625),
(39630, 39642),
(39643, 39645),
(39646, 39649),
(39650, 39656),
(39657, 39664),
(39665, 39670),
(39671, 39672),
(39673, 39676),
(39677, 39678),
(39679, 39687),
(39688, 39690),
(39691, 39695),
(39696, 39697),
(39698, 39699),
(39702, 39703),
(39704, 39709),
(39711, 39713),
(39714, 39716),
(39717, 39724),
(39725, 39728),
(39729, 39734),
(39735, 39736),
(39737, 39742),
(39745, 39750),
(39752, 39753),
(39755, 39760),
(39761, 39762),
(39764, 39769),
(39770, 39772),
(39774, 39775),
(39777, 39778),
(39779, 39780),
(39781, 39783),
(39784, 39785),
(39786, 39792),
(39794, 39798),
(39799, 39802),
(39807, 39809),
(39811, 39816),
(39817, 39820),
(39821, 39829),
(39830, 39832),
(39834, 39835),
(39837, 39841),
(39846, 39855),
(39856, 39859),
(39860, 39861),
(39863, 39866),
(39867, 39869),
(39870, 39874),
(39878, 39883),
(39886, 39891),
(39892, 39893),
(39894, 39897),
(39899, 39900),
(39901, 39902),
(39903, 39904),
(39905, 39910),
(39911, 39913),
(39914, 39916),
(39919, 39924),
(39925, 39926),
(39927, 39931),
(39933, 39934),
(39935, 39937),
(39938, 39939),
(39940, 39941),
(39942, 39943),
(39944, 39950),
(39951, 39959),
(39960, 39965),
(39966, 39967),
(39969, 39979),
(39981, 39987),
(39989, 39992),
(39993, 39996),
(39997, 39999),
(40001, 40002),
(40003, 40011),
(40014, 40017),
(40018, 40021),
(40022, 40025),
(40026, 40033),
(40035, 40036),
(40039, 40044),
(40046, 40047),
(40048, 40049),
(40050, 40051),
(40053, 40057),
(40059, 40060),
(40165, 40168),
(40169, 40170),
(40171, 40173),
(40176, 40177),
(40178, 40181),
(40182, 40184),
(40185, 40186),
(40194, 40196),
(40198, 40202),
(40203, 40204),
(40206, 40207),
(40209, 40211),
(40213, 40214),
(40215, 40217),
(40219, 40224),
(40227, 40228),
(40230, 40231),
(40232, 40233),
(40234, 40237),
(40239, 40241),
(40242, 40245),
(40250, 40256),
(40257, 40265),
(40266, 40267),
(40272, 40274),
(40275, 40277),
(40281, 40282),
(40284, 40294),
(40297, 40301),
(40303, 40305),
(40306, 40307),
(40310, 40312),
(40314, 40317),
(40318, 40319),
(40323, 40325),
(40326, 40328),
(40329, 40331),
(40333, 40336),
(40338, 40340),
(40341, 40345),
(40346, 40347),
(40353, 40354),
(40356, 40357),
(40361, 40365),
(40366, 40368),
(40369, 40371),
(40372, 40374),
(40376, 40381),
(40383, 40384),
(40385, 40389),
(40390, 40392),
(40393, 40395),
(40399, 40400),
(40403, 40408),
(40409, 40411),
(40414, 40417),
(40421, 40424),
(40425, 40426),
(40427, 40428),
(40429, 40433),
(40434, 40437),
(40440, 40443),
(40445, 40447),
(40450, 40451),
(40455, 40456),
(40458, 40459),
(40462, 40463),
(40464, 40467),
(40469, 40471),
(40473, 40479),
(40565, 40566),
(40568, 40574),
(40575, 40582),
(40583, 40585),
(40587, 40589),
(40590, 40592),
(40593, 40596),
(40597, 40601),
(40603, 40604),
(40605, 40608),
(40612, 40615),
(40616, 40619),
(40620, 40625),
(40627, 40630),
(40632, 40637),
(40638, 40640),
(40644, 40645),
(40646, 40647),
(40648, 40649),
(40651, 40659),
(40660, 40662),
(40664, 40666),
(40667, 40673),
(40676, 40678),
(40679, 40681),
(40684, 40691),
(40692, 40698),
(40699, 40702),
(40703, 40704),
(40706, 40708),
(40711, 40714),
(40718, 40728),
(40729, 40732),
(40735, 40739),
(40742, 40743),
(40746, 40749),
(40751, 40752),
(40753, 40755),
(40756, 40757),
(40759, 40760),
(40761, 40768),
(40769, 40770),
(40771, 40776),
(40778, 40780),
(40782, 40784),
(40786, 40793),
(40794, 40795),
(40797, 40804),
(40806, 40811),
(40812, 40820),
(40821, 40824),
(40826, 40827),
(40829, 40830),
(40845, 40846),
(40847, 40851),
(40852, 40856),
(40860, 40863),
(40864, 40868),
(40869, 40870),
(63785, 63786),
(63964, 63965),
(64014, 64046),
(64259, 64261),
(65281, 65375),
(65377, 65440),
(65504, 65509),
]
| mit | -7,270,062,047,885,370,000 | 14.96342 | 22 | 0.624184 | false |
drammock/mne-python | mne/preprocessing/realign.py | 7 | 3982 | # -*- coding: utf-8 -*-
# Authors: Eric Larson <[email protected]>
# License: BSD (3-clause)
import numpy as np
from ..io import BaseRaw
from ..utils import _validate_type, warn, logger, verbose
@verbose
def realign_raw(raw, other, t_raw, t_other, verbose=None):
"""Realign two simultaneous recordings.
    Due to clock drift, simultaneous recordings made at the same nominal sample
    rate by two separate devices can become out of sync over time. This
function uses event times captured by both acquisition devices to resample
``other`` to match ``raw``.
Parameters
----------
raw : instance of Raw
The first raw instance.
other : instance of Raw
The second raw instance. It will be resampled to match ``raw``.
t_raw : array-like, shape (n_events,)
The times of shared events in ``raw`` relative to ``raw.times[0]`` (0).
Typically these could be events on some TTL channel like
        ``find_events(raw)[:, 0] - raw.first_samp``.
t_other : array-like, shape (n_events,)
The times of shared events in ``other`` relative to ``other.times[0]``.
%(verbose)s
Notes
-----
This function operates inplace. It will:
1. Estimate the zero-order (start offset) and first-order (clock drift)
correction.
2. Crop the start of ``raw`` or ``other``, depending on which started
recording first.
3. Resample ``other`` to match ``raw`` based on the clock drift.
4. Crop the end of ``raw`` or ``other``, depending on which stopped
recording first (and the clock drift rate).
This function is primarily designed to work on recordings made at the same
sample rate, but it can also operate on recordings made at different
sample rates to resample and deal with clock drift simultaneously.
.. versionadded:: 0.22
"""
from scipy import stats
_validate_type(raw, BaseRaw, 'raw')
_validate_type(other, BaseRaw, 'other')
t_raw = np.array(t_raw, float)
t_other = np.array(t_other, float)
if t_raw.ndim != 1 or t_raw.shape != t_other.shape:
raise ValueError('t_raw and t_other must be 1D with the same shape, '
f'got shapes {t_raw.shape} and {t_other.shape}')
if len(t_raw) < 20:
warn('Fewer than 20 times passed, results may be unreliable')
# 1. Compute correction factors
coef = np.polyfit(t_other, t_raw, deg=1)
r, p = stats.pearsonr(t_other, t_raw)
msg = f'Linear correlation computed as R={r:0.3f} and p={p:0.2e}'
if p > 0.05 or r <= 0:
raise ValueError(msg + ', cannot resample safely')
if p > 1e-6:
warn(msg + ', results may be unreliable')
else:
logger.info(msg)
dr_ms_s = 1000 * abs(1 - coef[0])
logger.info(
f'Drift rate: {1000 * dr_ms_s:0.1f} μs/sec '
f'(total drift over {raw.times[-1]:0.1f} sec recording: '
f'{raw.times[-1] * dr_ms_s:0.1f} ms)')
# 2. Crop start of recordings to match using the zero-order term
msg = f'Cropping {coef[1]:0.3f} sec from the start of '
if coef[1] > 0: # need to crop start of raw to match other
logger.info(msg + 'raw')
raw.crop(coef[1], None)
t_raw -= coef[1]
else: # need to crop start of other to match raw
logger.info(msg + 'other')
other.crop(-coef[1], None)
t_other += coef[1]
# 3. Resample data using the first-order term
logger.info('Resampling other')
coef = coef[0]
sfreq_new = raw.info['sfreq'] * coef
other.load_data().resample(sfreq_new, verbose=True)
other.info['sfreq'] = raw.info['sfreq']
# 4. Crop the end of one of the recordings if necessary
delta = raw.times[-1] - other.times[-1]
msg = f'Cropping {abs(delta):0.3f} sec from the end of '
if delta > 0:
logger.info(msg + 'raw')
raw.crop(0, other.times[-1])
elif delta < 0:
logger.info(msg + 'other')
other.crop(0, raw.times[-1])
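# A minimal usage sketch: the file names are placeholders, and both recordings
# are assumed to contain the same events on a shared TTL channel.
if __name__ == '__main__':
    import mne
    raw = mne.io.read_raw_fif('sys1_raw.fif', preload=True)
    other = mne.io.read_raw_fif('sys2_raw.fif', preload=True)
    # Event times in seconds, relative to the start of each recording
    t_raw = (mne.find_events(raw)[:, 0] - raw.first_samp) / raw.info['sfreq']
    t_other = (mne.find_events(other)[:, 0] - other.first_samp) / other.info['sfreq']
    realign_raw(raw, other, t_raw, t_other)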
| bsd-3-clause | 1,552,136,163,736,269,800 | 36.556604 | 79 | 0.620447 | false |
hef/samba | lib/testtools/testtools/tests/matchers/helpers.py | 14 | 1643 | # Copyright (c) 2008-2012 testtools developers. See LICENSE for details.
from testtools.tests.helpers import FullStackRunTest
class TestMatchersInterface(object):
run_tests_with = FullStackRunTest
def test_matches_match(self):
matcher = self.matches_matcher
matches = self.matches_matches
mismatches = self.matches_mismatches
for candidate in matches:
self.assertEqual(None, matcher.match(candidate))
for candidate in mismatches:
mismatch = matcher.match(candidate)
self.assertNotEqual(None, mismatch)
self.assertNotEqual(None, getattr(mismatch, 'describe', None))
def test__str__(self):
# [(expected, object to __str__)].
from testtools.matchers._doctest import DocTestMatches
examples = self.str_examples
for expected, matcher in examples:
self.assertThat(matcher, DocTestMatches(expected))
def test_describe_difference(self):
# [(expected, matchee, matcher), ...]
examples = self.describe_examples
for difference, matchee, matcher in examples:
mismatch = matcher.match(matchee)
self.assertEqual(difference, mismatch.describe())
def test_mismatch_details(self):
# The mismatch object must provide get_details, which must return a
# dictionary mapping names to Content objects.
examples = self.describe_examples
for difference, matchee, matcher in examples:
mismatch = matcher.match(matchee)
details = mismatch.get_details()
self.assertEqual(dict(details), details)
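# A sketch of a concrete test case built on the mixin above. Equals is a real
# testtools matcher, but the exact describe() strings below are assumptions, so
# the example is left commented out.
#
# from testtools import TestCase
# from testtools.matchers import Equals
#
# class TestEqualsInterface(TestCase, TestMatchersInterface):
#     matches_matcher = Equals(1)
#     matches_matches = [1]
#     matches_mismatches = [2]
#     str_examples = [('Equals(1)', Equals(1))]
#     describe_examples = [('1 != 2', 2, Equals(1))]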
| gpl-3.0 | -6,453,091,210,964,741,000 | 38.119048 | 75 | 0.662203 | false |
apple/swift-lldb | packages/Python/lldbsuite/test/tools/lldb-server/TestGdbRemote_vCont.py | 5 | 5722 | from __future__ import print_function
import gdbremote_testcase
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestGdbRemote_vCont(gdbremote_testcase.GdbRemoteTestCaseBase):
mydir = TestBase.compute_mydir(__file__)
def vCont_supports_mode(self, mode, inferior_args=None):
# Setup the stub and set the gdb remote command stream.
procs = self.prep_debug_monitor_and_inferior(
inferior_args=inferior_args)
self.add_vCont_query_packets()
# Run the gdb remote command stream.
context = self.expect_gdbremote_sequence()
self.assertIsNotNone(context)
# Pull out supported modes.
supported_vCont_modes = self.parse_vCont_query_response(context)
self.assertIsNotNone(supported_vCont_modes)
# Verify we support the given mode.
self.assertTrue(mode in supported_vCont_modes)
def vCont_supports_c(self):
self.vCont_supports_mode("c")
def vCont_supports_C(self):
self.vCont_supports_mode("C")
def vCont_supports_s(self):
self.vCont_supports_mode("s")
def vCont_supports_S(self):
self.vCont_supports_mode("S")
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@debugserver_test
def test_vCont_supports_c_debugserver(self):
self.init_debugserver_test()
self.build()
self.vCont_supports_c()
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@llgs_test
def test_vCont_supports_c_llgs(self):
self.init_llgs_test()
self.build()
self.vCont_supports_c()
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@debugserver_test
def test_vCont_supports_C_debugserver(self):
self.init_debugserver_test()
self.build()
self.vCont_supports_C()
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@llgs_test
def test_vCont_supports_C_llgs(self):
self.init_llgs_test()
self.build()
self.vCont_supports_C()
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@debugserver_test
def test_vCont_supports_s_debugserver(self):
self.init_debugserver_test()
self.build()
self.vCont_supports_s()
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@llgs_test
def test_vCont_supports_s_llgs(self):
self.init_llgs_test()
self.build()
self.vCont_supports_s()
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@debugserver_test
def test_vCont_supports_S_debugserver(self):
self.init_debugserver_test()
self.build()
self.vCont_supports_S()
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@llgs_test
def test_vCont_supports_S_llgs(self):
self.init_llgs_test()
self.build()
self.vCont_supports_S()
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@debugserver_test
def test_single_step_only_steps_one_instruction_with_Hc_vCont_s_debugserver(
self):
self.init_debugserver_test()
self.build()
self.set_inferior_startup_launch()
self.single_step_only_steps_one_instruction(
use_Hc_packet=True, step_instruction="vCont;s")
@skipIfWindows # No pty support to test O* & I* notification packets.
@llgs_test
@expectedFailureAndroid(
bugnumber="llvm.org/pr24739",
archs=[
"arm",
"aarch64"])
@expectedFailureAll(
oslist=["linux"],
archs=[
"arm",
"aarch64"],
bugnumber="llvm.org/pr24739")
@skipIf(triple='^mips')
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
def test_single_step_only_steps_one_instruction_with_Hc_vCont_s_llgs(self):
self.init_llgs_test()
self.build()
self.set_inferior_startup_launch()
self.single_step_only_steps_one_instruction(
use_Hc_packet=True, step_instruction="vCont;s")
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
@debugserver_test
def test_single_step_only_steps_one_instruction_with_vCont_s_thread_debugserver(
self):
self.init_debugserver_test()
self.build()
self.set_inferior_startup_launch()
self.single_step_only_steps_one_instruction(
use_Hc_packet=False, step_instruction="vCont;s:{thread}")
@skipIfWindows # No pty support to test O* & I* notification packets.
@llgs_test
@expectedFailureAndroid(
bugnumber="llvm.org/pr24739",
archs=[
"arm",
"aarch64"])
@expectedFailureAll(
oslist=["linux"],
archs=[
"arm",
"aarch64"],
bugnumber="llvm.org/pr24739")
@skipIf(triple='^mips')
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
def test_single_step_only_steps_one_instruction_with_vCont_s_thread_llgs(
self):
self.init_llgs_test()
self.build()
self.set_inferior_startup_launch()
self.single_step_only_steps_one_instruction(
use_Hc_packet=False, step_instruction="vCont;s:{thread}")
| apache-2.0 | 3,233,518,604,053,807,600 | 34.540373 | 99 | 0.62985 | false |
brianwoo/django-tutorial | build/Django/tests/model_meta/results.py | 23 | 26393 | from .models import AbstractPerson, BasePerson, Person, Relating, Relation
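# Expected introspection results for the model_meta tests: each top-level key is
# an Options accessor name (e.g. 'fields', 'many_to_many') mapping model classes
# to the values the test suite asserts against.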
TEST_RESULTS = {
'get_all_field_names': {
Person: [
'baseperson_ptr',
'baseperson_ptr_id',
'content_type_abstract',
'content_type_abstract_id',
'content_type_base',
'content_type_base_id',
'content_type_concrete',
'content_type_concrete_id',
'data_abstract',
'data_base',
'data_inherited',
'data_not_concrete_abstract',
'data_not_concrete_base',
'data_not_concrete_inherited',
'fk_abstract',
'fk_abstract_id',
'fk_base',
'fk_base_id',
'fk_inherited',
'fk_inherited_id',
'followers_abstract',
'followers_base',
'followers_concrete',
'following_abstract',
'following_base',
'following_inherited',
'friends_abstract',
'friends_base',
'friends_inherited',
'generic_relation_abstract',
'generic_relation_base',
'generic_relation_concrete',
'id',
'm2m_abstract',
'm2m_base',
'm2m_inherited',
'object_id_abstract',
'object_id_base',
'object_id_concrete',
'relating_basepeople',
'relating_baseperson',
'relating_people',
'relating_person',
],
BasePerson: [
'content_type_abstract',
'content_type_abstract_id',
'content_type_base',
'content_type_base_id',
'data_abstract',
'data_base',
'data_not_concrete_abstract',
'data_not_concrete_base',
'fk_abstract',
'fk_abstract_id',
'fk_base',
'fk_base_id',
'followers_abstract',
'followers_base',
'following_abstract',
'following_base',
'friends_abstract',
'friends_base',
'generic_relation_abstract',
'generic_relation_base',
'id',
'm2m_abstract',
'm2m_base',
'object_id_abstract',
'object_id_base',
'person',
'relating_basepeople',
'relating_baseperson'
],
AbstractPerson: [
'content_type_abstract',
'content_type_abstract_id',
'data_abstract',
'data_not_concrete_abstract',
'fk_abstract',
'fk_abstract_id',
'following_abstract',
'friends_abstract',
'generic_relation_abstract',
'm2m_abstract',
'object_id_abstract',
],
Relating: [
'basepeople',
'basepeople_hidden',
'baseperson',
'baseperson_hidden',
'baseperson_hidden_id',
'baseperson_id',
'id',
'people',
'people_hidden',
'person',
'person_hidden',
'person_hidden_id',
'person_id',
'proxyperson',
'proxyperson_hidden',
'proxyperson_hidden_id',
'proxyperson_id',
],
},
'fields': {
Person: [
'id',
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
'data_base',
'fk_base_id',
'data_not_concrete_base',
'content_type_base_id',
'object_id_base',
'baseperson_ptr_id',
'data_inherited',
'fk_inherited_id',
'data_not_concrete_inherited',
'content_type_concrete_id',
'object_id_concrete',
],
BasePerson: [
'id',
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
'data_base',
'fk_base_id',
'data_not_concrete_base',
'content_type_base_id',
'object_id_base',
],
AbstractPerson: [
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
],
Relating: [
'id',
'baseperson_id',
'baseperson_hidden_id',
'person_id',
'person_hidden_id',
'proxyperson_id',
'proxyperson_hidden_id',
],
},
'local_fields': {
Person: [
'baseperson_ptr_id',
'data_inherited',
'fk_inherited_id',
'data_not_concrete_inherited',
'content_type_concrete_id',
'object_id_concrete',
],
BasePerson: [
'id',
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
'data_base',
'fk_base_id',
'data_not_concrete_base',
'content_type_base_id',
'object_id_base',
],
AbstractPerson: [
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
],
Relating: [
'id',
'baseperson_id',
'baseperson_hidden_id',
'person_id',
'person_hidden_id',
'proxyperson_id',
'proxyperson_hidden_id',
],
},
'local_concrete_fields': {
Person: [
'baseperson_ptr_id',
'data_inherited',
'fk_inherited_id',
'content_type_concrete_id',
'object_id_concrete',
],
BasePerson: [
'id',
'data_abstract',
'fk_abstract_id',
'content_type_abstract_id',
'object_id_abstract',
'data_base',
'fk_base_id',
'content_type_base_id',
'object_id_base',
],
AbstractPerson: [
'data_abstract',
'fk_abstract_id',
'content_type_abstract_id',
'object_id_abstract',
],
Relating: [
'id',
'baseperson_id',
'baseperson_hidden_id',
'person_id',
'person_hidden_id',
'proxyperson_id',
'proxyperson_hidden_id',
],
},
'many_to_many': {
Person: [
'm2m_abstract',
'friends_abstract',
'following_abstract',
'm2m_base',
'friends_base',
'following_base',
'm2m_inherited',
'friends_inherited',
'following_inherited',
],
BasePerson: [
'm2m_abstract',
'friends_abstract',
'following_abstract',
'm2m_base',
'friends_base',
'following_base',
],
AbstractPerson: [
'm2m_abstract',
'friends_abstract',
'following_abstract',
],
Relating: [
'basepeople',
'basepeople_hidden',
'people',
'people_hidden',
],
},
'many_to_many_with_model': {
Person: [
BasePerson,
BasePerson,
BasePerson,
BasePerson,
BasePerson,
BasePerson,
None,
None,
None,
],
BasePerson: [
None,
None,
None,
None,
None,
None,
],
AbstractPerson: [
None,
None,
None,
],
Relating: [
None,
None,
None,
None,
],
},
'get_all_related_objects_with_model_legacy': {
Person: (
('relating_baseperson', BasePerson),
('relating_person', None),
),
BasePerson: (
('person', None),
('relating_baseperson', None),
),
Relation: (
('fk_abstract_rel', None),
('fo_abstract_rel', None),
('fk_base_rel', None),
('fo_base_rel', None),
('fk_concrete_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_hidden_local': {
Person: (
('+', None),
('+', None),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_people+', None),
('Relating_people_hidden+', None),
('followers_concrete', None),
('friends_inherited_rel_+', None),
('relating_people', None),
('relating_person', None),
),
BasePerson: (
('+', None),
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('followers_abstract', None),
('followers_base', None),
('friends_abstract_rel_+', None),
('friends_base_rel_+', None),
('person', None),
('relating_basepeople', None),
('relating_baseperson', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_objects_with_model_hidden': {
Person: (
('+', BasePerson),
('+', BasePerson),
('+', None),
('+', None),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_m2m_abstract+', BasePerson),
('BasePerson_m2m_base+', BasePerson),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_basepeople+', BasePerson),
('Relating_basepeople_hidden+', BasePerson),
('Relating_people+', None),
('Relating_people_hidden+', None),
('followers_abstract', BasePerson),
('followers_base', BasePerson),
('followers_concrete', None),
('friends_abstract_rel_+', BasePerson),
('friends_base_rel_+', BasePerson),
('friends_inherited_rel_+', None),
('relating_basepeople', BasePerson),
('relating_baseperson', BasePerson),
('relating_people', None),
('relating_person', None),
),
BasePerson: (
('+', None),
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('followers_abstract', None),
('followers_base', None),
('friends_abstract_rel_+', None),
('friends_base_rel_+', None),
('person', None),
('relating_basepeople', None),
('relating_baseperson', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_objects_with_model_local': {
Person: (
('followers_concrete', None),
('relating_person', None),
('relating_people', None),
),
BasePerson: (
('followers_abstract', None),
('followers_base', None),
('person', None),
('relating_baseperson', None),
('relating_basepeople', None),
),
Relation: (
('fk_abstract_rel', None),
('fo_abstract_rel', None),
('fk_base_rel', None),
('fo_base_rel', None),
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('fk_concrete_rel', None),
('fo_concrete_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_objects_with_model': {
Person: (
('followers_abstract', BasePerson),
('followers_base', BasePerson),
('relating_baseperson', BasePerson),
('relating_basepeople', BasePerson),
('followers_concrete', None),
('relating_person', None),
('relating_people', None),
),
BasePerson: (
('followers_abstract', None),
('followers_base', None),
('person', None),
('relating_baseperson', None),
('relating_basepeople', None),
),
Relation: (
('fk_abstract_rel', None),
('fo_abstract_rel', None),
('fk_base_rel', None),
('fo_base_rel', None),
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('fk_concrete_rel', None),
('fo_concrete_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_objects_with_model_local_legacy': {
Person: (
('relating_person', None),
),
BasePerson: (
('person', None),
('relating_baseperson', None)
),
Relation: (
('fk_abstract_rel', None),
('fo_abstract_rel', None),
('fk_base_rel', None),
('fo_base_rel', None),
('fk_concrete_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_hidden_legacy': {
BasePerson: (
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('person', None),
('relating_baseperson', None),
),
Person: (
('+', BasePerson),
('+', None),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_m2m_abstract+', BasePerson),
('BasePerson_m2m_base+', BasePerson),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_basepeople+', BasePerson),
('Relating_basepeople_hidden+', BasePerson),
('Relating_people+', None),
('Relating_people_hidden+', None),
('relating_baseperson', BasePerson),
('relating_person', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_hidden_local_legacy': {
BasePerson: (
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('person', None),
('relating_baseperson', None),
),
Person: (
('+', None),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_people+', None),
('Relating_people_hidden+', None),
('relating_person', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_proxy_legacy': {
BasePerson: (
('person', None),
('relating_baseperson', None),
),
Person: (
('relating_baseperson', BasePerson),
            ('relating_person', None),
            ('relating_proxyperson', None),
),
Relation: (
            ('fk_abstract_rel', None),
            ('fo_abstract_rel', None),
            ('fk_base_rel', None),
            ('fo_base_rel', None),
            ('fk_concrete_rel', None),
            ('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_proxy_hidden_legacy': {
BasePerson: (
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('person', None),
('relating_baseperson', None),
),
Person: (
('+', BasePerson),
('+', None),
('+', None),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_m2m_abstract+', BasePerson),
('BasePerson_m2m_base+', BasePerson),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_basepeople+', BasePerson),
('Relating_basepeople_hidden+', BasePerson),
('Relating_people+', None),
('Relating_people_hidden+', None),
('relating_baseperson', BasePerson),
('relating_person', None),
('relating_proxyperson', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_many_to_many_with_model_legacy': {
BasePerson: (
('friends_abstract_rel_+', None),
('followers_abstract', None),
('friends_base_rel_+', None),
('followers_base', None),
('relating_basepeople', None),
('+', None),
),
Person: (
('friends_abstract_rel_+', BasePerson),
('followers_abstract', BasePerson),
('friends_base_rel_+', BasePerson),
('followers_base', BasePerson),
('relating_basepeople', BasePerson),
('+', BasePerson),
('friends_inherited_rel_+', None),
('followers_concrete', None),
('relating_people', None),
('+', None),
),
Relation: (
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_many_to_many_local_legacy': {
BasePerson: [
'friends_abstract_rel_+',
'followers_abstract',
'friends_base_rel_+',
'followers_base',
'relating_basepeople',
'+',
],
Person: [
'friends_inherited_rel_+',
'followers_concrete',
'relating_people',
'+',
],
Relation: [
'm2m_abstract_rel',
'm2m_base_rel',
'm2m_concrete_rel',
],
},
'virtual_fields': {
AbstractPerson: [
'generic_relation_abstract',
'content_object_abstract',
],
BasePerson: [
'generic_relation_base',
'content_object_base',
'generic_relation_abstract',
'content_object_abstract',
],
Person: [
'content_object_concrete',
'generic_relation_concrete',
'generic_relation_base',
'content_object_base',
'generic_relation_abstract',
'content_object_abstract',
],
},
}
| gpl-3.0 | -393,506,489,255,478,900 | 32.240554 | 74 | 0.451218 | false |
indhub/mxnet | example/speech_recognition/stt_layer_batchnorm.py | 52 | 1994 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
def batchnorm(net,
gamma=None,
beta=None,
eps=0.001,
momentum=0.9,
fix_gamma=False,
use_global_stats=False,
output_mean_var=False,
name=None):
if gamma is not None and beta is not None:
net = mx.sym.BatchNorm(data=net,
gamma=gamma,
beta=beta,
eps=eps,
momentum=momentum,
fix_gamma=fix_gamma,
use_global_stats=use_global_stats,
output_mean_var=output_mean_var,
name=name
)
else:
net = mx.sym.BatchNorm(data=net,
eps=eps,
momentum=momentum,
fix_gamma=fix_gamma,
use_global_stats=use_global_stats,
output_mean_var=output_mean_var,
name=name
)
return net
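# A minimal usage sketch (the layer name 'bn1' is arbitrary): wrap a symbol with
# batch normalization; MXNet creates the gamma/beta parameters implicitly when
# none are passed in.
if __name__ == '__main__':
    data = mx.sym.Variable('data')
    net = batchnorm(data, name='bn1')
    print(net.list_arguments())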
| apache-2.0 | -5,622,036,292,233,558,000 | 38.88 | 65 | 0.521565 | false |
MatteoNardi/dyanote-server | api/views.py | 1 | 1637 | from django.contrib.auth.models import User
from rest_framework import mixins
from rest_framework import generics
from rest_framework import renderers
from rest_framework import permissions
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from api.models import Page
from api.serializers import PageSerializer
from api.permissions import IsOwnerOrAdmin
@api_view(('GET',))
def api_root(request, format=None):
return Response({
'users': reverse('user-list', request=request, format=format),
})
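# Illustrative response body (host, port and route name are assumptions):
# GET / -> {"users": "http://localhost:8000/users/"}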
class PageList(generics.ListCreateAPIView):
"""
API endpoint that allows pages to be listed and created.
"""
serializer_class = PageSerializer
permission_classes = (permissions.IsAuthenticated, IsOwnerOrAdmin)
def get_queryset(self):
user = self.request.user
if user.is_superuser:
user = User.objects.get(username=self.kwargs['username'])
return Page.objects.filter(author=user)
def pre_save(self, obj):
obj.author = self.request.user
class PageDetail(generics.RetrieveUpdateDestroyAPIView):
"""
API endpoint that allows pages to be viewed, updated and deleted.
"""
serializer_class = PageSerializer
permission_classes = (permissions.IsAuthenticated, IsOwnerOrAdmin)
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return Page.objects.all()
return Page.objects.filter(author=user)
def pre_save(self, obj):
obj.author = self.request.user
| mit | 2,222,135,767,610,232,600 | 29.886792 | 70 | 0.723274 | false |
eonpatapon/nova | nova/api/openstack/compute/views/flavors.py | 49 | 3452 | # Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import common
class ViewBuilder(common.ViewBuilder):
_collection_name = "flavors"
def basic(self, request, flavor):
return {
"flavor": {
"id": flavor["flavorid"],
"name": flavor["name"],
"links": self._get_links(request,
flavor["flavorid"],
self._collection_name),
},
}
def show(self, request, flavor):
flavor_dict = {
"flavor": {
"id": flavor["flavorid"],
"name": flavor["name"],
"ram": flavor["memory_mb"],
"disk": flavor["root_gb"],
"vcpus": flavor.get("vcpus") or "",
"links": self._get_links(request,
flavor["flavorid"],
self._collection_name),
},
}
return flavor_dict
def index(self, request, flavors):
"""Return the 'index' view of flavors."""
coll_name = self._collection_name
return self._list_view(self.basic, request, flavors, coll_name)
def detail(self, request, flavors):
"""Return the 'detail' view of flavors."""
coll_name = self._collection_name + '/detail'
return self._list_view(self.show, request, flavors, coll_name)
def _list_view(self, func, request, flavors, coll_name):
"""Provide a view for a list of flavors.
:param func: Function used to format the flavor data
:param request: API request
:param flavors: List of flavors in dictionary format
:param coll_name: Name of collection, used to generate the next link
for a pagination query
:returns: Flavor reply data in dictionary format
"""
flavor_list = [func(request, flavor)["flavor"] for flavor in flavors]
flavors_links = self._get_collection_links(request,
flavors,
coll_name,
"flavorid")
flavors_dict = dict(flavors=flavor_list)
if flavors_links:
flavors_dict["flavors_links"] = flavors_links
return flavors_dict
class V3ViewBuilder(ViewBuilder):
def show(self, request, flavor):
flavor_dict = super(V3ViewBuilder, self).show(request, flavor)
flavor_dict['flavor'].update({
"swap": flavor["swap"] or "",
"OS-FLV-EXT-DATA:ephemeral": flavor["ephemeral_gb"],
"OS-FLV-DISABLED:disabled": flavor["disabled"],
"vcpus": flavor["vcpus"],
})
return flavor_dict
| apache-2.0 | -9,068,043,544,424,487,000 | 36.11828 | 78 | 0.54606 | false |
tobiasgehring/qudi | hardware/awg/tektronix_awg70k.py | 1 | 63429 | # -*- coding: utf-8 -*-
"""
This file contains the Qudi hardware module for AWG70000 Series.
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
import os
import time
import re
import visa
import numpy as np
from socket import socket, AF_INET, SOCK_STREAM
from ftplib import FTP
from collections import OrderedDict
from fnmatch import fnmatch
from core.base import Base
from interface.pulser_interface import PulserInterface, PulserConstraints
class AWG70K(Base, PulserInterface):
"""
"""
_modclass = 'awg70k'
_modtype = 'hardware'
def on_activate(self):
""" Initialisation performed during activation of the module.
"""
config = self.getConfiguration()
if 'awg_visa_address' in config.keys():
self.visa_address = config['awg_visa_address']
else:
self.log.error('This is AWG: Did not find >>awg_visa_address<< in configuration.')
if 'awg_ip_address' in config.keys():
self.ip_address = config['awg_ip_address']
else:
            self.log.error('This is AWG: Did not find >>awg_ip_address<< in configuration.')
if 'pulsed_file_dir' in config.keys():
self.pulsed_file_dir = config['pulsed_file_dir']
if not os.path.exists(self.pulsed_file_dir):
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
self.log.warning('The directory defined in parameter "pulsed_file_dir" in the '
'config for SequenceGeneratorLogic class does not exist!\n'
                             'The default home directory\n{0}\nwill be taken instead.'
''.format(self.pulsed_file_dir))
else:
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
self.log.warning('No parameter "pulsed_file_dir" was specified in the config for '
'SequenceGeneratorLogic as directory for the pulsed files!\nThe '
'default home directory\n{0}\nwill be taken instead.'
''.format(self.pulsed_file_dir))
if 'ftp_root_dir' in config.keys():
self.ftp_root_directory = config['ftp_root_dir']
else:
self.ftp_root_directory = 'C:\\inetpub\\ftproot'
self.log.warning('No parameter "ftp_root_dir" was specified in the config for '
'tektronix_awg70k as directory for the FTP server root on the AWG!\n'
'The default root directory\n{0}\nwill be taken instead.'
''.format(self.ftp_root_directory))
self.host_waveform_directory = self._get_dir_for_name('sampled_hardware_files')
self.asset_directory = 'waves'
self.user = 'anonymous'
self.passwd = 'anonymous@'
if 'ftp_login' in config.keys() and 'ftp_passwd' in config.keys():
self.user = config['ftp_login']
self.passwd = config['ftp_passwd']
# connect ethernet socket and FTP
self._rm = visa.ResourceManager()
if self.visa_address not in self._rm.list_resources():
self.log.error('VISA address "{0}" not found by the pyVISA resource manager.\nCheck '
'the connection by using for example "Agilent Connection Expert".'
''.format(self.visa_address))
else:
self.awg = self._rm.open_resource(self.visa_address)
# Set data transfer format (datatype, is_big_endian, container)
self.awg.values_format.use_binary('f', False, np.array)
# set timeout by default to 15 sec
self.awg.timeout = 15000
self.ftp = FTP(self.ip_address)
self.ftp.login(user=self.user, passwd=self.passwd)
self.ftp.cwd(self.asset_directory)
self.connected = True
self.awg_model = self._get_model_ID()[1]
self.log.debug('Found the following model: {0}'.format(self.awg_model))
self.sample_rate = self.get_sample_rate()
self.amplitude_list, self.offset_list = self.get_analog_level()
self.markers_low, self.markers_high = self.get_digital_level()
self.is_output_enabled = self._is_output_on()
self.use_sequencer = self.has_sequence_mode()
self.active_channel = self.get_active_channels()
self.interleave = self.get_interleave()
self.current_loaded_asset = ''
self._init_loaded_asset()
self.current_status = 0
def on_deactivate(self):
""" Required tasks to be performed during deactivation of the module.
"""
# Closes the connection to the AWG
try:
self.awg.close()
except:
self.log.debug('Closing AWG connection using pyvisa failed.')
self.log.info('Closed connection to AWG')
self.connected = False
return
def get_constraints(self):
"""
Retrieve the hardware constrains from the Pulsing device.
@return constraints object: object with pulser constraints as attributes.
Provides all the constraints (e.g. sample_rate, amplitude, total_length_bins,
channel_config, ...) related to the pulse generator hardware to the caller.
SEE PulserConstraints CLASS IN pulser_interface.py FOR AVAILABLE CONSTRAINTS!!!
If you are not sure about the meaning, look in other hardware files to get an impression.
If still additional constraints are needed, then they have to be added to the
PulserConstraints class.
        Each scalar parameter is a ScalarConstraints object defined in core.util.interfaces.
Essentially it contains min/max values as well as min step size, default value and unit of
the parameter.
PulserConstraints.activation_config differs, since it contain the channel
configuration/activation information of the form:
{<descriptor_str>: <channel_list>,
<descriptor_str>: <channel_list>,
...}
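        For example (using configuration names defined further below):
            {'ch1_2mrk': ['a_ch1', 'd_ch1', 'd_ch2'],
             'ch1_0mrk': ['a_ch1']}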
If the constraints cannot be set in the pulsing hardware (e.g. because it might have no
sequence mode) just leave it out so that the default is used (only zeros).
"""
constraints = PulserConstraints()
# The compatible file formats are hardware specific.
constraints.waveform_format = ['wfmx', 'wfm']
constraints.sequence_format = ['seqx', 'seq']
if self.awg_model == 'AWG70002A':
constraints.sample_rate.min = 1.5e3
constraints.sample_rate.max = 25.0e9
constraints.sample_rate.step = 5.0e2
constraints.sample_rate.default = 25.0e9
elif self.awg_model == 'AWG70001A':
constraints.sample_rate.min = 3.0e3
constraints.sample_rate.max = 50.0e9
constraints.sample_rate.step = 1.0e3
constraints.sample_rate.default = 50.0e9
constraints.a_ch_amplitude.min = 0.25
constraints.a_ch_amplitude.max = 0.5
constraints.a_ch_amplitude.step = 0.001
constraints.a_ch_amplitude.default = 0.5
# FIXME: Enter the proper digital channel low constraints:
constraints.d_ch_low.min = 0.0
constraints.d_ch_low.max = 0.0
constraints.d_ch_low.step = 0.0
constraints.d_ch_low.default = 0.0
# FIXME: Enter the proper digital channel high constraints:
constraints.d_ch_high.min = 0.0
constraints.d_ch_high.max = 1.4
constraints.d_ch_high.step = 0.1
constraints.d_ch_high.default = 1.4
constraints.sampled_file_length.min = 1
constraints.sampled_file_length.max = 8000000000
constraints.sampled_file_length.step = 1
constraints.sampled_file_length.default = 1
# FIXME: Check the proper number for your device
constraints.waveform_num.min = 1
constraints.waveform_num.max = 32000
constraints.waveform_num.step = 1
constraints.waveform_num.default = 1
# FIXME: Check the proper number for your device
constraints.sequence_num.min = 1
constraints.sequence_num.max = 4000
constraints.sequence_num.step = 1
constraints.sequence_num.default = 1
# FIXME: Check the proper number for your device
constraints.subsequence_num.min = 1
constraints.subsequence_num.max = 8000
constraints.subsequence_num.step = 1
constraints.subsequence_num.default = 1
# If sequencer mode is available then these should be specified
constraints.repetitions.min = 0
constraints.repetitions.max = 65536
constraints.repetitions.step = 1
constraints.repetitions.default = 0
# ToDo: Check how many external triggers are available
constraints.trigger_in.min = 0
constraints.trigger_in.max = 2
constraints.trigger_in.step = 1
constraints.trigger_in.default = 0
constraints.event_jump_to.min = 0
constraints.event_jump_to.max = 8000
constraints.event_jump_to.step = 1
constraints.event_jump_to.default = 0
constraints.go_to.min = 0
constraints.go_to.max = 8000
constraints.go_to.step = 1
constraints.go_to.default = 0
        # The names a_ch<num> and d_ch<num> are generic names which describe the channels
        # UNAMBIGUOUSLY. Here all possible channel configurations are stated, where only
        # the generic names should be used. The names for the different configurations
        # can be chosen freely.
activation_config = OrderedDict()
if self.awg_model == 'AWG70002A':
activation_config['all'] = ['a_ch1', 'd_ch1', 'd_ch2', 'a_ch2', 'd_ch3', 'd_ch4']
# Usage of both channels but reduced markers (higher analog resolution)
activation_config['ch1_2mrk_ch2_1mrk'] = ['a_ch1', 'd_ch1', 'd_ch2', 'a_ch2', 'd_ch3']
activation_config['ch1_2mrk_ch2_0mrk'] = ['a_ch1', 'd_ch1', 'd_ch2', 'a_ch2']
activation_config['ch1_1mrk_ch2_2mrk'] = ['a_ch1', 'd_ch1', 'a_ch2', 'd_ch3', 'd_ch4']
activation_config['ch1_0mrk_ch2_2mrk'] = ['a_ch1', 'a_ch2', 'd_ch3', 'd_ch4']
activation_config['ch1_1mrk_ch2_1mrk'] = ['a_ch1', 'd_ch1', 'a_ch2', 'd_ch3']
activation_config['ch1_0mrk_ch2_1mrk'] = ['a_ch1', 'a_ch2', 'd_ch3']
activation_config['ch1_1mrk_ch2_0mrk'] = ['a_ch1', 'd_ch1', 'a_ch2']
# Usage of channel 1 only:
activation_config['ch1_2mrk'] = ['a_ch1', 'd_ch1', 'd_ch2']
# Usage of channel 2 only:
activation_config['ch2_2mrk'] = ['a_ch2', 'd_ch3', 'd_ch4']
# Usage of only channel 1 with one marker:
activation_config['ch1_1mrk'] = ['a_ch1', 'd_ch1']
# Usage of only channel 2 with one marker:
activation_config['ch2_1mrk'] = ['a_ch2', 'd_ch3']
# Usage of only channel 1 with no marker:
activation_config['ch1_0mrk'] = ['a_ch1']
# Usage of only channel 2 with no marker:
activation_config['ch2_0mrk'] = ['a_ch2']
elif self.awg_model == 'AWG70001A':
activation_config['all'] = ['a_ch1', 'd_ch1', 'd_ch2']
# Usage of only channel 1 with one marker:
activation_config['ch1_1mrk'] = ['a_ch1', 'd_ch1']
# Usage of only channel 1 with no marker:
activation_config['ch1_0mrk'] = ['a_ch1']
constraints.activation_config = activation_config
# FIXME: additional constraint really necessary?
constraints.dac_resolution = {'min': 8, 'max': 10, 'step': 1, 'unit': 'bit'}
return constraints
def pulser_on(self):
""" Switches the pulsing device on.
@return int: error code (0:OK, -1:error, higher number corresponds to
current status of the device. Check then the
class variable status_dic.)
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
self.awg.write('AWGC:RUN')
# wait until the AWG is actually running
while not self._is_output_on():
time.sleep(0.25)
self.current_status = 1
self.is_output_enabled = True
return self.current_status
def pulser_off(self):
""" Switches the pulsing device off.
@return int: error code (0:OK, -1:error, higher number corresponds to
                      current status of the device. See the
class variable status_dic.)
"""
self.awg.write('AWGC:STOP')
# wait until the AWG has actually stopped
while self._is_output_on():
time.sleep(0.25)
self.current_status = 0
self.is_output_enabled = False
return 0
def upload_asset(self, asset_name=None):
""" Upload an already hardware conform file to the device.
Does NOT load it into channels.
        @param str asset_name: name of the ensemble/sequence to be uploaded
@return int: error code (0:OK, -1:error)
If nothing is passed, method will be skipped.
"""
# check input
if asset_name is None:
self.log.warning('No asset name provided for upload!\nCorrect that!\n'
'Command will be ignored.')
return -1
# self._activate_awg_mode()
        # first delete all files with this name, which might otherwise lead to confusion during the upload procedure:
self.delete_asset(asset_name)
# determine which files to transfer
filelist = self._get_filenames_on_host()
upload_names = []
for filename in filelist:
if filename == asset_name + '.seq':
upload_names.append(filename)
break
elif filename == asset_name + '.seqx':
upload_names.append(filename)
break
elif fnmatch(filename, asset_name + '_ch?.wfm*'):
upload_names.append(filename)
elif fnmatch(filename, asset_name + '.wfm*'):
upload_names.append(filename)
break
elif filename == asset_name + '.mat':
upload_names.append(filename)
break
# Transfer files and load into AWG workspace
for filename in upload_names:
self._send_file(filename)
file_path = os.path.join(self.ftp_root_directory, self.asset_directory, filename)
if filename.endswith('.mat'):
self.awg.write('MMEM:OPEN:SASS:WAV "{0}"'.format(file_path))
else:
self.awg.write('MMEM:OPEN "{0}"'.format(file_path))
self.awg.query('*OPC?')
            # Wait for the loading to complete
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.2)
return 0
def load_asset(self, asset_name, load_dict=None):
""" Loads a sequence or waveform to the specified channel of the pulsing
device.
@param str asset_name: The name of the asset to be loaded
@param dict load_dict: a dictionary with keys being one of the
available channel numbers and items being the
name of the already sampled
waveform/sequence files.
Examples: {1: rabi_ch1, 2: rabi_ch2}
{1: rabi_ch2, 2: rabi_ch1}
This parameter is optional. If none is given
then the channel association is invoked from
the sequence generation,
i.e. the filename appendix (_ch1, _ch2 etc.)
@return int: error code (0:OK, -1:error)
Unused for digital pulse generators without sequence storage capability
(PulseBlaster, FPGA).
"""
# self._activate_awg_mode()
# Get all sequence and waveform names currently loaded into AWG workspace
seq_list = self._get_sequence_names_memory()
wfm_list = self._get_waveform_names_memory()
# Check if load_dict is None or an empty dict
if not load_dict:
# check if the desired asset is in workspace. Load to channels if that is the case.
if asset_name in seq_list:
trac_num = int(self.awg.query('SLIS:SEQ:TRAC? "{0}"'.format(asset_name)))
for chnl in range(1, trac_num + 1):
self.awg.write('SOUR{0}:CASS:SEQ "{1}", {2}'.format(chnl, asset_name, chnl))
# check if the desired asset is in workspace. Load to channels if that is the case.
elif asset_name + '_ch1' in wfm_list:
self.awg.write('SOUR1:CASS:WAV "{0}"'.format(asset_name + '_ch1'))
if self._get_max_a_channel_number() > 1 and asset_name + '_ch2' in wfm_list:
self.awg.write('SOUR2:CASS:WAV "{0}"'.format(asset_name + '_ch2'))
self.current_loaded_asset = asset_name
else:
self.log.error('Loading assets into user defined channels is not yet implemented.\n'
'In other words: The "load_dict" parameter of the "load_asset" method '
'is not handled yet.')
        # Wait for the loading to complete
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.2)
return 0
def get_loaded_asset(self):
""" Retrieve the currently loaded asset name of the device.
        @return str: Name of the current asset; this can be a filename,
                     a waveform, a sequence etc.
"""
return self.current_loaded_asset
def _send_file(self, filename):
""" Sends an already hardware specific waveform file to the pulse
generators waveform directory.
@param string filename: The file name of the source file
@return int: error code (0:OK, -1:error)
Unused for digital pulse generators without sequence storage capability
(PulseBlaster, FPGA).
"""
filepath = os.path.join(self.host_waveform_directory, filename)
with FTP(self.ip_address) as ftp:
ftp.login(user=self.user,passwd=self.passwd) # login as default user anonymous, passwd anonymous@
ftp.cwd(self.asset_directory)
with open(filepath, 'rb') as uploaded_file:
ftp.storbinary('STOR '+filename, uploaded_file)
return 0
def clear_all(self):
""" Clears the loaded waveform from the pulse generators RAM.
@return int: error code (0:OK, -1:error)
Delete all waveforms and sequences from Hardware memory and clear the
visual display. Unused for digital pulse generators without sequence
storage capability (PulseBlaster, FPGA).
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
self.awg.write('WLIS:WAV:DEL ALL')
self.awg.write('SLIS:SEQ:DEL ALL')
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.25)
self.current_loaded_asset = ''
return 0
def get_status(self):
""" Retrieves the status of the pulsing hardware
        @return (int, dict): integer value of the current status with the
corresponding dictionary containing status
description for all the possible status variables
of the pulse generator hardware.
0 indicates that the instrument has stopped.
1 indicates that the instrument is waiting for trigger.
2 indicates that the instrument is running.
-1 indicates that the request of the status for AWG has failed.
"""
status_dic = {}
status_dic[-1] = 'Failed Request or Communication'
status_dic[0] = 'Device has stopped, but can receive commands.'
status_dic[1] = 'Device is active and running.'
# All the other status messages should have higher integer values
# then 1.
return self.current_status, status_dic
def set_sample_rate(self, sample_rate):
""" Set the sample rate of the pulse generator hardware
@param float sample_rate: The sample rate to be set (in Hz)
        @return float: the sample rate returned from the device (-1:error)
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
self.awg.write('CLOCK:SRATE %.4G' % sample_rate)
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.25)
time.sleep(1)
self.get_sample_rate()
return self.sample_rate
def get_sample_rate(self):
""" Set the sample rate of the pulse generator hardware
@return float: The current sample rate of the device (in Hz)
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
return_rate = float(self.awg.query('CLOCK:SRATE?'))
self.sample_rate = return_rate
return self.sample_rate
def get_analog_level(self, amplitude=None, offset=None):
""" Retrieve the analog amplitude and offset of the provided channels.
@param list amplitude: optional, if a specific amplitude value (in Volt
peak to peak, i.e. the full amplitude) of a
channel is desired.
@param list offset: optional, if a specific high value (in Volt) of a
channel is desired.
@return: ({}, {}): tuple of two dicts, with keys being the channel
number and items being the values for those channels.
Amplitude is always denoted in Volt-peak-to-peak and
Offset in (absolute) Voltage.
        If no entries provided then the levels of all channels were simply
returned. If no analog channels provided, return just an empty dict.
Example of a possible input:
amplitude = [1,4], offset =[1,3]
to obtain the amplitude of channel 1 and 4 and the offset
{1: -0.5, 4: 2.0} {}
since no high request was performed.
Note, the major difference to digital signals is that analog signals are
always oscillating or changing signals, otherwise you can use just
digital output. In contrast to digital output levels, analog output
levels are defined by an amplitude (here total signal span, denoted in
Voltage peak to peak) and an offset (denoted by an (absolute) voltage).
"""
amp = {}
off = {}
# Check if AWG is in function generator mode
# self._activate_awg_mode()
chnl_list = ['a_ch' + str(ch_num) for ch_num in
range(1, self._get_max_a_channel_number() + 1)]
pattern = re.compile('[0-9]+')
# get pp amplitudes
if amplitude is None:
for ch_num, chnl in enumerate(chnl_list):
amp[chnl] = float(self.awg.query('SOUR' + str(ch_num + 1) + ':VOLT:AMPL?'))
else:
for chnl in amplitude:
if chnl in chnl_list:
ch_num = int(re.search(pattern, chnl).group(0))
amp[chnl] = float(self.awg.query('SOUR' + str(ch_num) + ':VOLT:AMPL?'))
else:
self.log.warning('Get analog amplitude from AWG70k channel "{0}" failed. '
'Channel non-existent.'.format(str(chnl)))
# get voltage offsets
if offset is None:
for ch_num, chnl in enumerate(chnl_list):
off[chnl] = 0.0
else:
for chnl in offset:
if chnl in chnl_list:
ch_num = int(re.search(pattern, chnl).group(0))
off[chnl] = 0.0
else:
self.log.warning('Get analog offset from AWG70k channel "{0}" failed. '
'Channel non-existent.'.format(str(chnl)))
self.amplitude_list = amp
self.offset_list = off
return amp, off
def set_analog_level(self, amplitude=None, offset=None):
""" Set amplitude and/or offset value of the provided analog channel.
@param dict amplitude: dictionary, with key being the channel and items
being the amplitude values (in Volt peak to peak,
i.e. the full amplitude) for the desired channel.
@param dict offset: dictionary, with key being the channel and items
being the offset values (in absolute volt) for the
desired channel.
If nothing is passed then the command is being ignored.
Note, the major difference to digital signals is that analog signals are
always oscillating or changing signals, otherwise you can use just
digital output. In contrast to digital output levels, analog output
levels are defined by an amplitude (here total signal span, denoted in
Voltage peak to peak) and an offset (denoted by an (absolute) voltage).
In general there is not a bijective correspondence between
(amplitude, offset) for analog and (value high, value low) for digital!
"""
# Check the inputs by using the constraints...
constraints = self.get_constraints()
# ...and the channel numbers
num_of_channels = self._get_max_a_channel_number()
# Check if AWG is in function generator mode
# self._activate_awg_mode()
# amplitude sanity check
pattern = re.compile('[0-9]+')
if amplitude is not None:
            # iterate over a copy because entries may get deleted inside the loop
            for chnl in list(amplitude):
                ch_num = int(re.search(pattern, chnl).group(0))
                if ch_num > num_of_channels or ch_num < 1:
                    self.log.warning('Channel to set (a_ch{0}) not available in AWG.\nSetting '
                                     'analogue voltage for this channel ignored.'.format(chnl))
                    del amplitude[chnl]
                    continue
if amplitude[chnl] < constraints.a_ch_amplitude.min:
self.log.warning('Minimum Vpp for channel "{0}" is {1}. Requested Vpp of {2}V '
'was ignored and instead set to min value.'
''.format(chnl, constraints.a_ch_amplitude.min,
amplitude[chnl]))
amplitude[chnl] = constraints.a_ch_amplitude.min
elif amplitude[chnl] > constraints.a_ch_amplitude.max:
self.log.warning('Maximum Vpp for channel "{0}" is {1}. Requested Vpp of {2}V '
'was ignored and instead set to max value.'
''.format(chnl, constraints.a_ch_amplitude.max,
amplitude[chnl]))
amplitude[chnl] = constraints.a_ch_amplitude.max
# offset sanity check
if offset is not None:
            # iterate over a copy because entries may get deleted inside the loop
            for chnl in list(offset):
                ch_num = int(re.search(pattern, chnl).group(0))
                if ch_num > num_of_channels or ch_num < 1:
                    self.log.warning('Channel to set (a_ch{0}) not available in AWG.\nSetting '
                                     'offset voltage for this channel ignored.'.format(chnl))
                    del offset[chnl]
                    continue
if offset[chnl] < constraints.a_ch_offset.min:
self.log.warning('Minimum offset for channel "{0}" is {1}. Requested offset of '
'{2}V was ignored and instead set to min value.'
''.format(chnl, constraints.a_ch_offset.min, offset[chnl]))
offset[chnl] = constraints.a_ch_offset.min
elif offset[chnl] > constraints.a_ch_offset.max:
self.log.warning('Maximum offset for channel "{0}" is {1}. Requested offset of '
'{2}V was ignored and instead set to max value.'
''.format(chnl, constraints.a_ch_offset.max,
offset[chnl]))
offset[chnl] = constraints.a_ch_offset.max
if amplitude is not None:
for a_ch in amplitude:
self.awg.write('SOUR{0}:VOLT:AMPL {1}'.format(a_ch, amplitude[a_ch]))
self.amplitude_list[a_ch] = amplitude[a_ch]
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.25)
if offset is not None:
for a_ch in offset:
self.awg.write('SOUR{0}:VOLT:OFFSET {1}'.format(a_ch, offset[a_ch]))
self.offset_list[a_ch] = offset[a_ch]
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.25)
return self.amplitude_list, self.offset_list
def get_digital_level(self, low=None, high=None):
""" Retrieve the digital low and high level of the provided channels.
@param list low: optional, if a specific low value (in Volt) of a
channel is desired.
@param list high: optional, if a specific high value (in Volt) of a
channel is desired.
@return: tuple of two dicts, with keys being the channel number and
items being the values for those channels. Both low and high
value of a channel is denoted in (absolute) Voltage.
        If no entries provided then the levels of all channels were simply
returned. If no digital channels provided, return just an empty dict.
Example of a possible input:
low = [1,4]
        to obtain the low voltage values of digital channel 1 and 4. A possible
answer might be
{1: -0.5, 4: 2.0} {}
since no high request was performed.
Note, the major difference to analog signals is that digital signals are
either ON or OFF, whereas analog channels have a varying amplitude
range. In contrast to analog output levels, digital output levels are
defined by a voltage, which corresponds to the ON status and a voltage
which corresponds to the OFF status (both denoted in (absolute) voltage)
In general there is not a bijective correspondence between
(amplitude, offset) for analog and (value high, value low) for digital!
"""
# FIXME: Test with multiple channel AWG
low_val = {}
high_val = {}
# Check if AWG is in function generator mode
# self._activate_awg_mode()
digital_channels = list(range(1, 2 * self._get_max_a_channel_number() + 1))
analog_channels = [chnl // 2 + chnl % 2 for chnl in digital_channels]
marker_indices = [((chnl - 1) % 2) + 1 for chnl in digital_channels]
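        # Mapping example: d_ch1 -> (a_ch1, marker 1), d_ch2 -> (a_ch1, marker 2),
        # d_ch3 -> (a_ch2, marker 1), d_ch4 -> (a_ch2, marker 2); in general d_ch<k>
        # belongs to a_ch<(k // 2) + (k % 2)> with marker index ((k - 1) % 2) + 1.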
# get low marker levels
if low is None:
for chnl in digital_channels:
low_val[chnl] = float(
self.awg.query('SOUR' + str(analog_channels[chnl - 1]) + ':MARK'
+ str(marker_indices[chnl - 1]) + ':VOLT:LOW?'))
else:
for chnl in low:
low_val[chnl] = float(
self.awg.query('SOUR' + str(analog_channels[chnl - 1]) + ':MARK'
+ str(marker_indices[chnl - 1]) + ':VOLT:LOW?'))
# get high marker levels
if high is None:
for chnl in digital_channels:
high_val[chnl] = float(self.awg.query('SOUR' + str(analog_channels[chnl - 1])
+ ':MARK' + str(marker_indices[chnl - 1])
+ ':VOLT:HIGH?'))
else:
for chnl in high:
high_val[chnl] = float(self.awg.query('SOUR' + str(analog_channels[chnl - 1])
+ ':MARK' + str(marker_indices[chnl - 1])
+ ':VOLT:HIGH?'))
self.markers_high = high_val
self.markers_low = low_val
return low_val, high_val
def set_digital_level(self, low=None, high=None):
""" Set low and/or high value of the provided digital channel.
@param dict low: dictionary, with key being the channel and items being
the low values (in volt) for the desired channel.
@param dict high: dictionary, with key being the channel and items being
the high values (in volt) for the desired channel.
If nothing is passed then the command is being ignored.
Note, the major difference to analog signals is that digital signals are
either ON or OFF, whereas analog channels have a varying amplitude
range. In contrast to analog output levels, digital output levels are
defined by a voltage, which corresponds to the ON status and a voltage
which corresponds to the OFF status (both denoted in (absolute) voltage)
In general there is not a bijective correspondence between
(amplitude, offset) for analog and (value high, value low) for digital!
"""
if low is None:
low = {}
if high is None:
high = {}
# Check if AWG is in function generator mode
# self._activate_awg_mode()
        # If you want to check the input use the constraints:
constraints = self.get_constraints()
for d_ch in low:
#FIXME: Tell the device the proper digital voltage low value:
# self.tell('SOURCE1:MARKER{0}:VOLTAGE:LOW {1}'.format(d_ch, low[d_ch]))
pass
for d_ch in high:
#FIXME: Tell the device the proper digital voltage high value:
# self.tell('SOURCE1:MARKER{0}:VOLTAGE:HIGH {1}'.format(d_ch, high[d_ch]))
pass
def get_active_channels(self, ch=None):
""" Get the active channels of the pulse generator hardware.
@param list ch: optional, if specific analog or digital channels are
needed to be asked without obtaining all the channels.
@return dict: where keys denoting the channel number and items boolean
expressions whether channel are active or not.
        Example for a possible input (order is not important):
ch = ['a_ch2', 'd_ch2', 'a_ch1', 'd_ch5', 'd_ch1']
then the output might look like
{'a_ch2': True, 'd_ch2': False, 'a_ch1': False, 'd_ch5': True, 'd_ch1': False}
If no parameters are passed to this method all channels will be asked
for their setting.
"""
# If you want to check the input use the constraints:
constraints = self.get_constraints()
max_analog_channels = self._get_max_a_channel_number()
# Check if AWG is in function generator mode
# self._activate_awg_mode()
active_ch = {}
for a_ch in range(max_analog_channels):
active_ch['a_ch' + str(a_ch + 1)] = False
active_ch['d_ch' + str((2 * a_ch) + 1)] = False
active_ch['d_ch' + str((2 * a_ch) + 2)] = False
# check what analog channels are active
for a_ch in range(1, max_analog_channels + 1):
if bool(int(self.awg.query('OUTPUT' + str(a_ch) + ':STATE?'))):
active_ch['a_ch' + str(a_ch)] = True
# check how many markers are active on each channel, i.e. the DAC resolution
max_res = constraints.dac_resolution['max']
for a_ch in range(max_analog_channels):
if active_ch['a_ch' + str(a_ch + 1)]:
digital_mrk = max_res - int(self.awg.query('SOUR' + str(a_ch + 1) + ':DAC:RES?'))
if digital_mrk > 0:
active_ch['d_ch' + str((2 * a_ch) + 1)] = True
if digital_mrk == 2:
active_ch['d_ch' + str((2 * a_ch) + 2)] = True
self.active_channel = active_ch
# return either all channel information or just the one asked for.
if ch is None:
return_ch = active_ch
else:
return_ch = dict()
for channel in ch:
return_ch[channel] = active_ch[channel]
return return_ch
def set_active_channels(self, ch=None):
""" Set the active channels for the pulse generator hardware.
@param dict ch: dictionary with keys being the analog or digital
string generic names for the channels with items being
a boolean value.
@return dict: with the actual set values for active channels for analog
and digital values.
If nothing is passed then the command will return an empty dict.
Note: After setting the active channels of the device, retrieve them
again for obtaining the actual set value(s) and use that
information for further processing.
Example for possible input:
ch={'a_ch2': True, 'd_ch1': False, 'd_ch3': True, 'd_ch4': True}
to activate analog channel 2 digital channel 3 and 4 and to deactivate
digital channel 1.
AWG5000 Series instruments support only 14-bit resolution. Therefore
this command will have no effect on the DAC for these instruments. On
        other devices the deactivation of digital channels increases the DAC
resolution of the analog channels.
"""
if ch is None:
return {}
constraints = self.get_constraints()
# Check if AWG is in function generator mode
# self._activate_awg_mode()
new_channels_state = self.active_channel.copy()
for chnl in ch:
if chnl in self.active_channel:
new_channels_state[chnl] = ch[chnl]
else:
self.log.error('Trying to (de)activate channel "{0}". This channel is not present '
'in AWG. Setting channels aborted.'.format(chnl))
return {}
# check if the channels to set are part of the activation_config constraints
new_active_channels = [chnl for chnl in new_channels_state if new_channels_state[chnl]]
new_active_channels.sort()
active_channels_ok = False
for conf in constraints.activation_config:
if sorted(constraints.activation_config[conf]) == new_active_channels:
active_channels_ok = True
if not active_channels_ok:
self.log.error('activation_config to set ({0}) is not allowed according to constraints.'
''.format(new_active_channels))
return {}
# get lists of all digital and analog channels separately
a_chan = [chnl for chnl in new_channels_state if 'a_ch' in chnl]
d_chan = [chnl for chnl in new_channels_state if 'd_ch' in chnl]
# calculate dac resolution for each analog channel and set it in hardware.
# Also (de)activate the analog channels accordingly
num_pattern = re.compile('[0-9]+')
max_res = constraints.dac_resolution['max']
for a_ch in a_chan:
ach_num = int(re.search(num_pattern, a_ch).group(0))
# determine number of markers for current a_ch
if new_channels_state['d_ch' + str(2 * ach_num - 1)]:
if new_channels_state['d_ch' + str(2 * ach_num)]:
marker_num = 2
else:
marker_num = 1
else:
marker_num = 0
# set DAC resolution for this channel
dac_res = max_res - marker_num
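            # Worked example: with the 10 bit maximum resolution, two active markers
            # leave an 8 bit DAC (10 - 2), one marker leaves 9 bit, none leaves 10 bit.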
self.awg.write('SOUR' + str(ach_num) + ':DAC:RES ' + str(dac_res))
# (de)activate the analog channel
if new_channels_state[a_ch]:
self.awg.write('OUTPUT' + str(ach_num) + ':STATE ON')
else:
self.awg.write('OUTPUT' + str(ach_num) + ':STATE OFF')
self.active_channel = new_channels_state
return self.active_channel
def get_uploaded_asset_names(self):
""" Retrieve the names of all uploaded assets on the device.
@return list: List of all uploaded asset name strings in the current
device directory. This is no list of the file names.
Unused for digital pulse generators without sequence storage capability
(PulseBlaster, FPGA).
"""
uploaded_files = self._get_filenames_on_device()
name_list = []
for filename in uploaded_files:
asset_name = None
if fnmatch(filename, '*_ch?.wfmx'):
asset_name = filename.rsplit('_', 1)[0]
elif fnmatch(filename, '*_ch?.wfm'):
asset_name = filename.rsplit('_', 1)[0]
elif filename.endswith('.seqx'):
asset_name = filename[:-5]
elif filename.endswith('.seq'):
asset_name = filename[:-4]
elif filename.endswith('.mat'):
asset_name = filename[:-4]
if asset_name is not None and asset_name not in name_list:
name_list.append(asset_name)
return name_list
def get_saved_asset_names(self):
""" Retrieve the names of all sampled and saved assets on the host PC.
This is no list of the file names.
@return list: List of all saved asset name strings in the current
directory of the host PC.
"""
# list of all files in the waveform directory ending with .mat or .WFMX
file_list = self._get_filenames_on_host()
# exclude the channel specifier for multiple analog channels and create return list
name_list = []
for filename in file_list:
asset_name = None
if fnmatch(filename, '*_ch?.wfmx'):
asset_name = filename.rsplit('_', 1)[0]
elif fnmatch(filename, '*_ch?.wfm'):
asset_name = filename.rsplit('_', 1)[0]
elif filename.endswith('.seqx'):
asset_name = filename[:-5]
elif filename.endswith('.seq'):
asset_name = filename[:-4]
elif filename.endswith('.mat'):
asset_name = filename[:-4]
if asset_name is not None and asset_name not in name_list:
name_list.append(asset_name)
return name_list
def delete_asset(self, asset_name):
""" Delete all files associated with an asset with the passed asset_name from the device memory.
@param str asset_name: The name of the asset to be deleted
Optionally a list of asset names can be passed.
@return list: a list with strings of the files which were deleted.
Unused for digital pulse generators without sequence storage capability
(PulseBlaster, FPGA).
"""
if not isinstance(asset_name, list):
asset_name = [asset_name]
# self._activate_awg_mode()
# get all uploaded files and asset names in workspace
uploaded_files = self._get_filenames_on_device()
wfm_list = self._get_waveform_names_memory()
seq_list = self._get_sequence_names_memory()
# Create list of uploaded files to be deleted
files_to_delete = []
for name in asset_name:
for filename in uploaded_files:
if fnmatch(filename, name + '_ch?.wfm*') or \
fnmatch(filename, name + '.wfm*') or \
                        filename in (name + '.mat', name + '.seq', name + '.seqx'):
files_to_delete.append(filename)
# delete files
with FTP(self.ip_address) as ftp:
# login as default user anonymous, passwd anonymous@
ftp.login(user=self.user, passwd=self.passwd)
ftp.cwd(self.asset_directory)
for filename in files_to_delete:
ftp.delete(filename)
# clear waveforms from AWG workspace
for wfm in wfm_list:
for name in asset_name:
if fnmatch(wfm, name + '_ch?') or wfm == name:
self.awg.write('WLIS:WAV:DEL "{0}"'.format(wfm))
# clear sequences from AWG workspace
for name in asset_name:
if name in seq_list:
self.awg.write('SLIS:SEQ:DEL "{0}"'.format(name))
return files_to_delete
def set_asset_dir_on_device(self, dir_path):
""" Change the directory where the assets are stored on the device.
@param string dir_path: The target directory
@return int: error code (0:OK, -1:error)
Unused for digital pulse generators without changeable file structure
(PulseBlaster, FPGA).
"""
# check whether the desired directory exists:
with FTP(self.ip_address) as ftp:
ftp.login(user=self.user,passwd=self.passwd) # login as default user anonymous, passwd anonymous@
try:
ftp.cwd(dir_path)
except:
self.log.info('Desired directory {0} not found on AWG '
'device.\n'
'Create new.'.format(dir_path))
ftp.mkd(dir_path)
self.asset_directory = dir_path
return 0
def get_asset_dir_on_device(self):
""" Ask for the directory where the assets are stored on the device.
@return string: The current sequence directory
Unused for digital pulse generators without changeable file structure
(PulseBlaster, FPGA).
"""
return self.asset_directory
def has_sequence_mode(self):
""" Asks the pulse generator whether sequence mode exists.
@return: bool, True for yes, False for no.
"""
options = self.awg.query('*OPT?')[1:-2].split(',')
has_seq_mode = '03' in options
return has_seq_mode
def set_interleave(self, state=False):
""" Turns the interleave of an AWG on or off.
@param bool state: The state the interleave should be set to
(True: ON, False: OFF)
@return int: error code (0:OK, -1:error)
        Unused for pulse generator hardware other than an AWG. The AWG 70000
        Series does not have an interleave mode and this method exists only for
        compatibility reasons.
"""
if state:
self.log.warning('Interleave mode not available for the AWG 70000 Series!\n'
'Method call will be ignored.')
return False
def get_interleave(self):
""" Check whether Interleave is on in AWG.
Unused for pulse generator hardware other than an AWG. The AWG 70000
Series does not have an interleave mode and this method exists only for
        compatibility reasons.
@return bool: will be always False since no interleave functionality
"""
return False
def reset(self):
"""Reset the device.
@return int: error code (0:OK, -1:error)
"""
self.awg.write('*RST')
self.awg.write('*WAI')
return 0
def ask(self, question):
""" Asks the device a 'question' and receive and return an answer from it.
@param string question: string containing the command
@return string: the answer of the device to the 'question' in a string
"""
answer = self.awg.query(question).replace('\n', '')
return answer
def tell(self, command):
""" Sends a command string to the device.
@param string command: string containing the command
@return int: error code (0:OK, -1:error)
"""
bytes_written, enum_status_code = self.awg.write(command)
return int(enum_status_code)
def direct_write_ensemble(self, ensemble_name, analog_samples, digital_samples):
"""
@param ensemble_name: Name for the waveform to be created.
@param analog_samples: numpy.ndarray of type float32 containing the voltage samples.
@param digital_samples: numpy.ndarray of type bool containing the marker states for each
sample.
First dimension is marker index; second dimension is sample number
@return:
"""
# check input
if not ensemble_name:
self.log.error('Please specify an ensemble name for direct waveform creation.')
return -1
if type(analog_samples).__name__ != 'ndarray':
self.log.warning('Analog samples for direct waveform creation have wrong data type.\n'
'Converting to numpy.ndarray of type float32.')
analog_samples = np.array(analog_samples, dtype='float32')
if type(digital_samples).__name__ != 'ndarray':
self.log.warning('Digital samples for direct waveform creation have wrong data type.\n'
'Converting to numpy.ndarray of type bool.')
digital_samples = np.array(digital_samples, dtype=bool)
min_samples = int(self.awg.query('WLIS:WAV:LMIN?'))
if analog_samples.shape[1] < min_samples or digital_samples.shape[1] < min_samples:
self.log.error('Minimum waveform length for AWG70000A series is {0} samples.\n'
'Direct waveform creation failed.'.format(min_samples))
return -1
if analog_samples.shape[1] != digital_samples.shape[1]:
self.log.error('Number of analog and digital samples must be the same.\n'
'Direct waveform creation failed.')
return -1
# determine active channels
activation_dict = self.get_active_channels()
active_chnl = [chnl for chnl in activation_dict if activation_dict[chnl]]
active_analog = [chnl for chnl in active_chnl if 'a_ch' in chnl]
active_analog.sort()
active_digital = [chnl for chnl in active_chnl if 'd_ch' in chnl]
active_digital.sort()
# Sanity check of channel numbers
if len(active_analog) != analog_samples.shape[0] or len(active_digital) != digital_samples.shape[0]:
self.log.error('Mismatch of channel activation and sample array dimensions for direct '
'write.\nChannel activation is: {0} analog, {1} digital.\n'
'Sample arrays have: {2} analog, {3} digital.'
''.format(len(active_analog), len(active_digital),
analog_samples.shape[0], digital_samples.shape[0]))
return -1
for a_ch in active_analog:
a_ch_num = int(a_ch.split('ch')[-1])
            # markers of analog channel <n> are d_ch<2n-1> and d_ch<2n>
            mrk_ch_1 = 'd_ch{0}'.format(a_ch_num * 2 - 1)
            mrk_ch_2 = 'd_ch{0}'.format(a_ch_num * 2)
wfm_name = ensemble_name + '_ch' + str(a_ch_num)
# Encode marker information in an array of bytes (uint8)
if mrk_ch_1 in active_digital and mrk_ch_2 in active_digital:
mrk1_index = active_digital.index(mrk_ch_1)
mrk2_index = active_digital.index(mrk_ch_2)
mrk_bytes = np.add(np.left_shift(digital_samples[mrk2_index].astype('uint8'), 7),
np.left_shift(digital_samples[mrk1_index].astype('uint8'), 6))
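                # Bit layout of each marker byte (worked example): marker 2 occupies
                # bit 7 and marker 1 occupies bit 6, so mrk1=1, mrk2=1 -> 0b11000000
                # (192) and mrk1=1, mrk2=0 -> 0b01000000 (64); bits 0-5 stay zero.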
            elif mrk_ch_1 in active_digital and mrk_ch_2 not in active_digital:
mrk1_index = active_digital.index(mrk_ch_1)
mrk_bytes = np.left_shift(digital_samples[mrk1_index].astype('uint8'), 6)
else:
mrk_bytes = None
# Check if waveform already exists and delete if necessary.
if wfm_name in self._get_waveform_names_memory():
self.awg.write('WLIS:WAV:DEL "{0}"'.format(wfm_name))
# Create waveform in AWG workspace and fill in sample data
self.awg.write('WLIS:WAV:NEW "{0}", {1}'.format(wfm_name, digital_samples.shape[1]))
self.awg.write_values('WLIS:WAV:DATA "{0}",'.format(wfm_name),
analog_samples[a_ch_num - 1])
if mrk_bytes is not None:
self.awg.write_values('WLIS:WAV:MARK:DATA "{0}",'.format(wfm_name), mrk_bytes)
# Wait for everything to complete
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.2)
return 0
def direct_write_sequence(self, sequence_name, sequence_params):
"""
@param sequence_name:
@param sequence_params:
@return:
"""
trig_dict = {-1: 'OFF', 0: 'OFF', 1: 'ATR', 2: 'BTR'}
active_analog = [chnl for chnl in self.get_active_channels() if 'a_ch' in chnl]
num_tracks = len(active_analog)
num_steps = len(sequence_params)
# Check if sequence already exists and delete if necessary.
if sequence_name in self._get_sequence_names_memory():
self.awg.write('SLIS:SEQ:DEL "{0}"'.format(sequence_name))
# Create new sequence and set jump timing to immediate
self.awg.write('SLIS:SEQ:NEW "{0}", {1}, {2}'.format(sequence_name, num_steps, num_tracks))
self.awg.write('SLIS:SEQ:EVEN:JTIM "{0}", IMM'.format(sequence_name))
# Fill in sequence information
for step in range(num_steps):
self.awg.write('SLIS:SEQ:STEP{0}:EJIN "{1}", {2}'.format(step + 1, sequence_name,
trig_dict[sequence_params[step]['trigger_wait']]))
if sequence_params[step]['event_jump_to'] <= 0:
jumpto = 'NEXT'
else:
jumpto = str(sequence_params[step]['event_jump_to'])
self.awg.write('SLIS:SEQ:STEP{0}:EJUM "{1}", {2}'.format(step + 1,
sequence_name, jumpto))
if sequence_params[step]['repetitions'] <= 0:
repeat = 'INF'
else:
repeat = str(sequence_params[step]['repetitions'])
self.awg.write('SLIS:SEQ:STEP{0}:RCO "{1}", {2}'.format(step + 1,
sequence_name, repeat))
if sequence_params[step]['go_to'] <= 0:
goto = 'NEXT'
else:
goto = str(sequence_params[step]['go_to'])
self.awg.write('SLIS:SEQ:STEP{0}:GOTO "{1}", {2}'.format(step + 1, sequence_name, goto))
waveform_name = sequence_params[step]['name'][0].rsplit('_ch', 1)[0]
if num_tracks == 1:
self.awg.write('SLIS:SEQ:STEP{0}:TASS1:WAV "{1}", "{2}"'.format(step + 1,
sequence_name,
waveform_name + '_ch1'))
elif num_tracks == 2:
self.awg.write('SLIS:SEQ:STEP{0}:TASS1:WAV "{1}", "{2}"'.format(step + 1,
sequence_name,
waveform_name + '_ch1'))
self.awg.write('SLIS:SEQ:STEP{0}:TASS2:WAV "{1}", "{2}"'.format(step + 1,
sequence_name,
waveform_name + '_ch2'))
# Wait for everything to complete
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.2)
return 0
def _init_loaded_asset(self):
"""
Gets the name of the currently loaded asset from the AWG and sets the attribute accordingly.
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
# first get all the channel assets
a_ch_asset = [self.awg.query('SOUR{0}:CASS?'.format(count))[1:-2]
for count in range(1, self._get_max_a_channel_number() + 1)]
tmp_list = [a_ch.split('_ch') for a_ch in a_ch_asset]
a_ch_asset = [ele[0] for ele in filter(lambda x: len(x) == 2, tmp_list)]
        # if at least one channel has an asset of the form '<name>_ch<num>' loaded
if len(a_ch_asset) != 0:
all_same = True
for asset in a_ch_asset:
if asset != a_ch_asset[0]:
all_same = False
break
if all_same:
self.current_loaded_asset = a_ch_asset[0]
else:
self.log.error("In _init_loaded_asset: The case of differing asset names is not "
"yet handled")
self.current_loaded_asset = ''
else:
self.current_loaded_asset = ''
return self.current_loaded_asset
def _get_sequence_names_memory(self):
"""
Gets all sequence names currently loaded into the AWG workspace
@return: list of names
"""
number_of_seq = int(self.awg.query('SLIS:SIZE?'))
sequence_list = [None] * number_of_seq
for i in range(number_of_seq):
seq_name = self.awg.query('SLIS:NAME? {0}'.format(i + 1))[1:-2]
sequence_list[i] = seq_name
return sequence_list
def _get_dir_for_name(self, name):
""" Get the path to the pulsed sub-directory 'name'.
@param name: string, name of the folder
@return: string, absolute path to the directory with folder 'name'.
"""
path = os.path.join(self.pulsed_file_dir, name)
if not os.path.exists(path):
os.makedirs(os.path.abspath(path))
return os.path.abspath(path)
def _get_filenames_on_device(self):
""" Get the full filenames of all assets saved on the device.
@return: list, The full filenames of all assets saved on the device.
"""
filename_list = []
with FTP(self.ip_address) as ftp:
ftp.login(user=self.user,passwd=self.passwd) # login as default user anonymous, passwd anonymous@
ftp.cwd(self.asset_directory)
# get only the files from the dir and skip possible directories
log =[]
file_list = []
ftp.retrlines('LIST', callback=log.append)
for line in log:
if '<DIR>' not in line:
                    # this is what a potential line looks like:
                    # '05-10-16 05:22PM 292 SSR aom adjusted.seq'
                    # The first part holds the date information. Remove it and
                    # then separate the first number, which indicates the size of
                    # the file, from the rest. That is necessary in case the
                    # filename contains whitespaces:
size_filename = line[18:].lstrip()
# split after the first appearing whitespace and take the
# rest as filename, remove for safety all trailing
# whitespaces:
actual_filename = size_filename.split(' ', 1)[1].lstrip()
file_list.append(actual_filename)
for filename in file_list:
if filename.endswith(('.wfm', '.wfmx', '.mat', '.seq', '.seqx')):
if filename not in filename_list:
filename_list.append(filename)
return filename_list
def _get_filenames_on_host(self):
""" Get the full filenames of all assets saved on the host PC.
@return: list, The full filenames of all assets saved on the host PC.
"""
filename_list = [f for f in os.listdir(self.host_waveform_directory) if
f.endswith('.wfmx') or f.endswith('.wfm') or f.endswith(
'.seq') or f.endswith('.mat')]
return filename_list
def _get_model_ID(self):
"""
@return: a string which represents the model id of the AWG.
"""
model_id = self.awg.query('*IDN?').replace('\n', '').split(',')
return model_id
def _get_max_a_channel_number(self):
"""
@return: Returns an integer which represents the number of analog
channels.
"""
constraints = self.get_constraints()
config = constraints.activation_config
        # pick the activation config with the most channels
        largest_list = max(config.values(), key=len)
lst = [kk for kk in largest_list if 'a_ch' in kk]
analog_channel_lst = [w.replace('a_ch', '') for w in lst]
max_number_of_channels = max(map(int, analog_channel_lst))
return max_number_of_channels
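    # Example: for the AWG70002A the largest activation config contains
    # 'a_ch1' and 'a_ch2', so this returns 2; for the AWG70001A it returns 1.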
def _get_waveform_names_memory(self):
"""
Gets all waveform names currently loaded into the AWG workspace
@return: list of names
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
number_of_wfm = int(self.awg.query('WLIS:SIZE?'))
waveform_list = [None] * number_of_wfm
for i in range(number_of_wfm):
wfm_name = self.awg.query('WLIS:NAME? {0}'.format(i + 1))[1:-2]
waveform_list[i] = wfm_name
return waveform_list
def _is_output_on(self):
"""
        Asks the AWG if the output is enabled, i.e. if the AWG is running
@return: bool, (True: output on, False: output off)
"""
run_state = bool(int(self.awg.query('AWGC:RST?')))
return run_state
# def _activate_awg_mode(self):
# """
# Helper method to activate AWG mode if the device is currently in function generator mode.
# """
# # Check if AWG is still in MW mode (function generator mode)
# if self.awg.query('INST:MODE?').replace('\n', '') != 'AWG':
# self.awg.write('INST:MODE AWG')
# self.awg.write('*WAI')
# return
| gpl-3.0 | 7,061,621,111,263,829,000 | 44.081023 | 119 | 0.566239 | false |
sdague/home-assistant | homeassistant/components/smhi/__init__.py | 26 | 1025 | """Support for the Swedish weather institute weather service."""
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import Config, HomeAssistant
# Have to import for config_flow to work even if they are not used here
from .config_flow import smhi_locations # noqa: F401
from .const import DOMAIN # noqa: F401
DEFAULT_NAME = "smhi"
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured SMHI."""
# We allow setup only through config flow type of config
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up SMHI forecast as config entry."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "weather")
)
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload a config entry."""
await hass.config_entries.async_forward_entry_unload(config_entry, "weather")
return True
| apache-2.0 | -175,163,716,586,589,380 | 34.344828 | 85 | 0.729756 | false |
OCA/l10n-brazil | l10n_br_purchase_stock/wizards/stock_invocing_onshipping.py | 1 | 2256 | # @ 2021 Akretion - www.akretion.com.br -
# Magno Costa <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import fields, models
class StockInvoiceOnshipping(models.TransientModel):
_inherit = "stock.invoice.onshipping"
def _build_invoice_values_from_pickings(self, pickings):
"""
Build dict to create a new invoice from given pickings
:param pickings: stock.picking recordset
:return: dict
"""
invoice, values = super()._build_invoice_values_from_pickings(pickings)
pick = fields.first(pickings)
if pick.purchase_id:
values["purchase_id"] = pick.purchase_id.id
if pick.purchase_id.payment_term_id.id != values["payment_term_id"]:
values.update({"payment_term_id": pick.purchase_id.payment_term_id.id})
return invoice, values
def _get_move_key(self, move):
"""
Get the key based on the given move
:param move: stock.move recordset
:return: key
"""
key = super()._get_move_key(move)
if move.purchase_line_id:
            # TODO: should grouping of the lines be allowed?
            # Should grouping of Purchase Orders be allowed?
if type(key) is tuple:
key = key + (move.purchase_line_id,)
else:
                # TODO - it would be better to check the type to know
                # whether the key really is an object in this case
key = (key, move.purchase_line_id)
return key
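        # Resulting key shape (illustrative): for a tuple base key the purchase line
        # is appended, e.g. (base_key_part, ..., purchase_line); for a scalar base
        # key a new tuple (base_key, purchase_line) is built. Either way, moves from
        # different purchase order lines never share an invoice line.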
def _get_invoice_line_values(self, moves, invoice_values, invoice):
"""
Create invoice line values from given moves
:param moves: stock.move
:param invoice: account.invoice
:return: dict
"""
values = super()._get_invoice_line_values(moves, invoice_values, invoice)
        # Because the key includes purchase_line_id,
        # only one record arrives here
if len(moves) == 1:
            # If a single line arrives without a
            # purchase_line_id it must be ignored
if moves.purchase_line_id:
values["purchase_line_id"] = moves.purchase_line_id.id
return values
| agpl-3.0 | 1,582,065,237,709,572,900 | 34.777778 | 87 | 0.59583 | false |
cloudera/recordservice | thirdparty/thrift-0.9.0/test/py/TestServer.py | 30 | 7232 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import division
import sys, glob, time
sys.path.insert(0, glob.glob('../../lib/py/build/lib.*')[0])
from optparse import OptionParser
parser = OptionParser()
parser.add_option('--genpydir', type='string', dest='genpydir',
default='gen-py',
help='include this local directory in sys.path for locating generated code')
parser.add_option("--port", type="int", dest="port",
help="port number for server to listen on")
parser.add_option("--zlib", action="store_true", dest="zlib",
help="use zlib wrapper for compressed transport")
parser.add_option("--ssl", action="store_true", dest="ssl",
help="use SSL for encrypted transport")
parser.add_option('-v', '--verbose', action="store_const",
dest="verbose", const=2,
help="verbose output")
parser.add_option('-q', '--quiet', action="store_const",
dest="verbose", const=0,
help="minimal output")
parser.add_option('--proto', dest="proto", type="string",
help="protocol to use, one of: accel, binary, compact")
parser.set_defaults(port=9090, verbose=1, proto='binary')
options, args = parser.parse_args()
sys.path.insert(0, options.genpydir)
from ThriftTest import ThriftTest
from ThriftTest.ttypes import *
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift.transport import TZlibTransport
from thrift.protocol import TBinaryProtocol
from thrift.protocol import TCompactProtocol
from thrift.server import TServer, TNonblockingServer, THttpServer
PROT_FACTORIES = {'binary': TBinaryProtocol.TBinaryProtocolFactory,
'accel': TBinaryProtocol.TBinaryProtocolAcceleratedFactory,
'compact': TCompactProtocol.TCompactProtocolFactory}
class TestHandler:
def testVoid(self):
if options.verbose > 1:
print 'testVoid()'
def testString(self, str):
if options.verbose > 1:
print 'testString(%s)' % str
return str
def testByte(self, byte):
if options.verbose > 1:
print 'testByte(%d)' % byte
return byte
def testI16(self, i16):
if options.verbose > 1:
print 'testI16(%d)' % i16
return i16
def testI32(self, i32):
if options.verbose > 1:
print 'testI32(%d)' % i32
return i32
def testI64(self, i64):
if options.verbose > 1:
print 'testI64(%d)' % i64
return i64
def testDouble(self, dub):
if options.verbose > 1:
print 'testDouble(%f)' % dub
return dub
def testStruct(self, thing):
if options.verbose > 1:
print 'testStruct({%s, %d, %d, %d})' % (thing.string_thing, thing.byte_thing, thing.i32_thing, thing.i64_thing)
return thing
def testException(self, str):
if options.verbose > 1:
print 'testException(%s)' % str
if str == 'Xception':
x = Xception()
x.errorCode = 1001
x.message = str
raise x
elif str == "throw_undeclared":
raise ValueError("Exception test PASSES.")
def testOneway(self, seconds):
if options.verbose > 1:
print 'testOneway(%d) => sleeping...' % seconds
time.sleep(seconds / 3) # be quick
if options.verbose > 1:
print 'done sleeping'
def testNest(self, thing):
if options.verbose > 1:
print 'testNest(%s)' % thing
return thing
def testMap(self, thing):
if options.verbose > 1:
print 'testMap(%s)' % thing
return thing
def testSet(self, thing):
if options.verbose > 1:
print 'testSet(%s)' % thing
return thing
def testList(self, thing):
if options.verbose > 1:
print 'testList(%s)' % thing
return thing
def testEnum(self, thing):
if options.verbose > 1:
print 'testEnum(%s)' % thing
return thing
def testTypedef(self, thing):
if options.verbose > 1:
print 'testTypedef(%s)' % thing
return thing
def testMapMap(self, thing):
if options.verbose > 1:
print 'testMapMap(%s)' % thing
return thing
def testMulti(self, arg0, arg1, arg2, arg3, arg4, arg5):
if options.verbose > 1:
print 'testMulti(%s)' % [arg0, arg1, arg2, arg3, arg4, arg5]
x = Xtruct(byte_thing=arg0, i32_thing=arg1, i64_thing=arg2)
return x
# set up the protocol factory from the --proto option
pfactory_cls = PROT_FACTORIES.get(options.proto, None)
if pfactory_cls is None:
raise AssertionError('Unknown --proto option: %s' % options.proto)
pfactory = pfactory_cls()
# get the server type (TSimpleServer, TNonblockingServer, etc...)
if len(args) > 1:
raise AssertionError('Only one server type may be specified, not multiple types.')
server_type = args[0]
# Set up the handler and processor objects
handler = TestHandler()
processor = ThriftTest.Processor(handler)
# Handle THttpServer as a special case
if server_type == 'THttpServer':
  server = THttpServer.THttpServer(processor, ('', options.port), pfactory)
server.serve()
sys.exit(0)
# set up server transport and transport factory
host = None
if options.ssl:
from thrift.transport import TSSLSocket
transport = TSSLSocket.TSSLServerSocket(host, options.port, certfile='test_cert.pem')
else:
transport = TSocket.TServerSocket(host, options.port)
tfactory = TTransport.TBufferedTransportFactory()
# if --zlib, then wrap server transport, and use a different transport factory
if options.zlib:
transport = TZlibTransport.TZlibTransport(transport) # wrap with zlib
tfactory = TZlibTransport.TZlibTransportFactory()
# do server-specific setup here:
if server_type == "TNonblockingServer":
server = TNonblockingServer.TNonblockingServer(processor, transport, inputProtocolFactory=pfactory)
elif server_type == "TProcessPoolServer":
import signal
from thrift.server import TProcessPoolServer
server = TProcessPoolServer.TProcessPoolServer(processor, transport, tfactory, pfactory)
server.setNumWorkers(5)
def set_alarm():
def clean_shutdown(signum, frame):
for worker in server.workers:
if options.verbose > 0:
print 'Terminating worker: %s' % worker
worker.terminate()
if options.verbose > 0:
print 'Requesting server to stop()'
try:
server.stop()
except:
pass
signal.signal(signal.SIGALRM, clean_shutdown)
signal.alarm(2)
set_alarm()
else:
# look up server class dynamically to instantiate server
ServerClass = getattr(TServer, server_type)
server = ServerClass(processor, transport, tfactory, pfactory)
# enter server main loop
server.serve()
| apache-2.0 | -2,568,211,774,749,857,300 | 31.142222 | 117 | 0.696626 | false |
fmacias64/deap | examples/pso/speciation.py | 12 | 6672 | # This file is part of DEAP.
#
# DEAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# DEAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with DEAP. If not, see <http://www.gnu.org/licenses/>.
"""Implementation of the Speciation Particle Swarm Optimization algorithm as
presented in *Li, Blackwell, and Branke, 2006, Particle Swarm with Speciation
and Adaptation in a Dynamic Environment.*
"""
import itertools
import math
import operator
import random
import numpy
try:
from itertools import imap
except:
# Python 3 nothing to do
pass
else:
map = imap
from deap import base
from deap.benchmarks import movingpeaks
from deap import creator
from deap import tools
scenario = movingpeaks.SCENARIO_2
NDIM = 5
BOUNDS = [scenario["min_coord"], scenario["max_coord"]]
mpb = movingpeaks.MovingPeaks(dim=NDIM, **scenario)
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Particle", list, fitness=creator.FitnessMax, speed=list,
best=None, bestfit=creator.FitnessMax)
def generate(pclass, dim, pmin, pmax, smin, smax):
part = pclass(random.uniform(pmin, pmax) for _ in range(dim))
part.speed = [random.uniform(smin, smax) for _ in range(dim)]
return part
def convert_quantum(swarm, rcloud, centre):
dim = len(swarm[0])
for part in swarm:
position = [random.gauss(0, 1) for _ in range(dim)]
dist = math.sqrt(sum(x**2 for x in position))
# Gaussian distribution
# u = abs(random.gauss(0, 1.0/3.0))
# part[:] = [(rcloud * x * u**(1.0/dim) / dist) + c for x, c in zip(position, centre)]
# UVD distribution
# u = random.random()
# part[:] = [(rcloud * x * u**(1.0/dim) / dist) + c for x, c in zip(position, centre)]
# NUVD distribution
u = abs(random.gauss(0, 1.0/3.0))
part[:] = [(rcloud * x * u / dist) + c for x, c in zip(position, centre)]
del part.fitness.values
del part.bestfit.values
part.best = None
return swarm
def updateParticle(part, best, chi, c):
ce1 = (c*random.uniform(0, 1) for _ in range(len(part)))
ce2 = (c*random.uniform(0, 1) for _ in range(len(part)))
ce1_p = map(operator.mul, ce1, map(operator.sub, best, part))
ce2_g = map(operator.mul, ce2, map(operator.sub, part.best, part))
a = map(operator.sub,
map(operator.mul,
itertools.repeat(chi),
map(operator.add, ce1_p, ce2_g)),
map(operator.mul,
itertools.repeat(1-chi),
part.speed))
part.speed = list(map(operator.add, part.speed, a))
part[:] = list(map(operator.add, part, part.speed))
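# Note (added for clarity): algebraically the two statements above implement Clerc's
# constricted PSO update, v' = chi * (v + c*r1*(best - x) + c*r2*(part.best - x)) and
# x' = x + v', since v + chi*(...) - (1 - chi)*v == chi*(v + ...).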
toolbox = base.Toolbox()
toolbox.register("particle", generate, creator.Particle, dim=NDIM,
pmin=BOUNDS[0], pmax=BOUNDS[1], smin=-(BOUNDS[1] - BOUNDS[0])/2.0,
smax=(BOUNDS[1] - BOUNDS[0])/2.0)
toolbox.register("swarm", tools.initRepeat, list, toolbox.particle)
toolbox.register("update", updateParticle, chi=0.729843788, c=2.05)
toolbox.register("convert", convert_quantum)
toolbox.register("evaluate", mpb)
def main(verbose=True):
NPARTICLES = 100
RS = (BOUNDS[1] - BOUNDS[0]) / (50**(1.0/NDIM)) # between 1/20 and 1/10 of the domain's range
PMAX = 10
RCLOUD = 1.0 # 0.5 times the move severity
stats = tools.Statistics(lambda ind: ind.fitness.values)
stats.register("avg", numpy.mean)
stats.register("std", numpy.std)
stats.register("min", numpy.min)
stats.register("max", numpy.max)
logbook = tools.Logbook()
logbook.header = "gen", "nswarm", "evals", "error", "offline_error", "avg", "max"
swarm = toolbox.swarm(n=NPARTICLES)
generation = 0
while mpb.nevals < 5e5:
# Evaluate each particle in the swarm
for part in swarm:
part.fitness.values = toolbox.evaluate(part)
if not part.best or part.bestfit < part.fitness:
part.best = toolbox.clone(part[:]) # Get the position
part.bestfit.values = part.fitness.values # Get the fitness
# Sort swarm into species, best individual comes first
sorted_swarm = sorted(swarm, key=lambda ind: ind.bestfit, reverse=True)
species = []
while sorted_swarm:
found = False
for s in species:
dist = math.sqrt(sum((x1 - x2)**2 for x1, x2 in zip(sorted_swarm[0].best, s[0].best)))
if dist <= RS:
found = True
s.append(sorted_swarm[0])
break
if not found:
species.append([sorted_swarm[0]])
sorted_swarm.pop(0)
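        # Speciation sketch: particles are scanned from best to worst fitness; a
        # particle joins the first species whose seed (its first, i.e. best, member)
        # has a personal best within Euclidean distance RS of the particle's own
        # best position, otherwise the particle founds a new species as its seed.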
record = stats.compile(swarm)
logbook.record(gen=generation, evals=mpb.nevals, nswarm=len(species),
error=mpb.currentError(), offline_error=mpb.offlineError(), **record)
if verbose:
print(logbook.stream)
# Detect change
if any(s[0].bestfit.values != toolbox.evaluate(s[0].best) for s in species):
# Convert particles to quantum particles
for s in species:
s[:] = toolbox.convert(s, rcloud=RCLOUD, centre=s[0].best)
else:
# Replace exceeding particles in a species with new particles
for s in species:
if len(s) > PMAX:
n = len(s) - PMAX
del s[PMAX:]
s.extend(toolbox.swarm(n=n))
# Update particles that have not been reinitialized
for s in species[:-1]:
for part in s[:PMAX]:
toolbox.update(part, s[0].best)
del part.fitness.values
# Return all but the worst species' updated particles to the swarm
# The worst species is replaced by new particles
swarm = list(itertools.chain(toolbox.swarm(n=len(species[-1])), *species[:-1]))
generation += 1
if __name__ == '__main__':
main()
| lgpl-3.0 | 3,291,423,557,657,767,400 | 35.861878 | 102 | 0.593225 | false |
RedHatQE/cfme_tests | cfme/scripting/conf.py | 1 | 3763 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""Script to encrypt config files.
Usage:
scripts/encrypt_conf.py confname1 confname2 ... confnameN
scripts/encrypt_conf.py credentials
"""
import io
import click
import yaycl_crypt
from . import link_config
from cfme.utils import conf
@click.group(help='Functions affecting configuration files')
def main():
pass
main.add_command(link_config.main, name='link')
@main.command(help='Tests a yaml file')
@click.argument('conf_name', default='credentials')
def test(conf_name):
"""Test yaml file to see how many keys exist"""
creds = conf.__getattr__(conf_name)
print("{} keys found, if this value seems low, there may be a YAML error".format(len(creds)))
@main.command('show-credential', help='Shows the value of a credential key')
@click.argument('cred-or-provider-key')
@click.option('--only-credentials', is_flag=True, help='Only search credentials (not providers)')
def show_credential(cred_or_provider_key, only_credentials):
"""Function to show the given credentials, takes either a provider key or a credential key"""
data = conf.cfme_data
if cred_or_provider_key in data.get('management_systems', {}) and not only_credentials:
endpoints_data = data['management_systems'][cred_or_provider_key].get('endpoints', {})
for endpoint in endpoints_data:
print(endpoint)
cred_key = endpoints_data[endpoint]['credentials']
cred_dict = conf.credentials[cred_key]
for k in cred_dict:
print(" {}: {}".format(k, cred_dict[k]))
elif cred_or_provider_key in conf.credentials:
cred_dict = conf.credentials[cred_or_provider_key]
for k in cred_dict:
print("{}: {}".format(k, cred_dict[k]))
else:
print("Key couldn't be found in providers or credentials YAMLS")
@main.command('show-provider', help='Shows the configuration of a provider')
@click.argument('provider-key')
def show_provider(provider_key):
"""Function to show provider data"""
output = io.BytesIO()
data = conf.cfme_data
if provider_key in data.get('management_systems', {}):
data['management_systems'][provider_key].dump(output)
print(output.getvalue())
else:
print("Key couldn't be found in provider data")
@main.command(help='Encrypts a yaml file')
@click.argument('conf_name', default='credentials')
@click.option('--delete', default=False, is_flag=True,
help='If supplied delete the unencrypted config of the same name.')
def encrypt(conf_name, delete):
"""Function to encrypt a given conf file"""
conf_name = conf_name.strip()
yaycl_crypt.encrypt_yaml(conf, conf_name, delete=delete)
print('{} conf encrypted'.format(conf_name))
if not delete:
print('WARNING: unencrypted file left which will override encrypted')
@main.command(help='Decrypts a yaml file')
@click.argument('conf_name', default='credentials')
@click.option('--delete', default=False, is_flag=True,
help='If supplied delete the encrypted config of the same name.')
@click.option('--skip/--no-skip', default=True,
help='If supplied raise exception if decrypted file already exists')
def decrypt(conf_name, delete, skip):
"""Function to decrypt a given conf file"""
conf_name = conf_name.strip()
try:
yaycl_crypt.decrypt_yaml(conf, conf_name, delete=delete)
except yaycl_crypt.YayclCryptError as ex:
if skip and 'overwrite' in ex.message:
print('{} conf decrypt skipped, decrypted file already exists'.format(conf_name))
return
else:
raise
print('{} conf decrypted'.format(conf_name))
if __name__ == "__main__":
main()
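
# Example invocations (a hedged sketch; assumes this file is importable as
# the 'cfme.scripting.conf' module, and 'vsphere55' is a made-up provider key):
#
#   python -m cfme.scripting.conf encrypt credentials --delete
#   python -m cfme.scripting.conf show-provider vsphere55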
| gpl-2.0 | -8,515,425,333,429,540,000 | 34.838095 | 98 | 0.667021 | false |
hippyk/pix2code | model/convert_imgs_to_arrays.py | 2 | 1160 | #!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
__author__ = 'Tony Beltramelli - www.tonybeltramelli.com'
import os
import sys
import shutil
from classes.Utils import *
from classes.model.Config import *
argv = sys.argv[1:]
if len(argv) < 2:
print("Error: not enough argument supplied:")
print("convert_imgs_to_arrays.py <input path> <output path>")
exit(0)
else:
input_path = argv[0]
output_path = argv[1]
if not os.path.exists(output_path):
os.makedirs(output_path)
print("Converting images to numpy arrays...")
for f in os.listdir(input_path):
if f.find(".png") != -1:
img = Utils.get_preprocessed_img("{}/{}".format(input_path, f), IMAGE_SIZE)
file_name = f[:f.find(".png")]
np.savez_compressed("{}/{}".format(output_path, file_name), features=img)
retrieve = np.load("{}/{}.npz".format(output_path, file_name))["features"]
assert np.array_equal(img, retrieve)
shutil.copyfile("{}/{}.gui".format(input_path, file_name), "{}/{}.gui".format(output_path, file_name))
print("Numpy arrays saved in {}".format(output_path))
| apache-2.0 | -2,144,922,168,953,959,700 | 28 | 110 | 0.650862 | false |
jvkops/titanium_mobile | support/android/tilogger.py | 37 | 1757 | from __future__ import with_statement
import os, sys
class TiLogger:
ERROR = 0
WARN = 1
INFO = 2
DEBUG = 3
TRACE = 4
def __init__(self, logfile, level=TRACE, output_stream=sys.stdout):
self.level = level
self.output_stream = output_stream
global _logfile
_logfile = logfile
if _logfile is not None:
logfolder = os.path.dirname(_logfile)
try:
if not os.path.exists(logfolder):
os.mkdir(logfolder)
except:
print "[ERROR] error creating log folder %s: %s" % (logfolder, sys.exc_info()[0])
try:
with open(_logfile, 'w') as f:
f.write('Logfile initialized\n')
except:
print "[ERROR] error initializing (writing to) log file %s: %s" % (_logfile, sys.exc_info()[0])
self.info("logfile = " + logfile)
def _level_prefix(self, level):
return {
TiLogger.ERROR: "ERROR",
TiLogger.WARN: "WARN",
TiLogger.INFO: "INFO",
TiLogger.DEBUG: "DEBUG",
TiLogger.TRACE: "TRACE"
}[level];
def _log(self, msg, level):
global _logfile
if self.level >= level:
prefix = self._level_prefix(level)
line = "[%s] %s" % (prefix, msg)
print >> self.output_stream, line
self.output_stream.flush()
sys.stdout.flush()
if _logfile is not None:
try:
with open(_logfile, 'a') as f:
f.write("%s\n" % line)
except:
print "[ERROR] error writing to log %s: %s" % (_logfile, sys.exc_info()[0])
def info(self, msg):
self._log(msg, TiLogger.INFO)
def debug(self, msg):
self._log(msg, TiLogger.DEBUG)
def warn(self, msg):
self._log(msg, TiLogger.WARN)
def trace(self, msg):
self._log(msg, TiLogger.TRACE)
def error(self, msg):
self._log(msg, TiLogger.ERROR)
# if __name__ == "__main__":
# _logfile = ''
# print "[DEBUG] TiLogger initialized"
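
# A minimal usage sketch (hedged; the log path below is hypothetical):
#
# log = TiLogger('/tmp/titanium_build.log', level=TiLogger.DEBUG)
# log.info('build started')     # written: INFO is within the DEBUG threshold
# log.trace('very verbose')     # suppressed: TRACE is above the DEBUG threshold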
| apache-2.0 | -177,229,503,595,410,940 | 23.746479 | 99 | 0.622652 | false |
subhacom/moose-core | tests/python/test_function.py | 2 | 2728 | # test_function.py ---
#
# Filename: test_function.py
# Description:
# Author: subha
# Maintainer:
# Created: Sat Mar 28 19:34:20 2015 (-0400)
# Version:
# Last-Updated:
# By:
# Update #: 0
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
"""Check variable ordering - bug #161"""
from __future__ import print_function
import numpy as np
import moose
def test_var_order():
"""The y values are one step behind the x values because of
scheduling sequences"""
nsteps = 5
simtime = nsteps
dt = 1.0
# fn0 = moose.Function('/fn0')
# fn0.x.num = 2
# fn0.expr = 'x0 + x1'
# fn0.mode = 1
fn1 = moose.Function('/fn1')
fn1.x.num = 2
fn1.expr = 'y1 + y0 + x1 + x0'
fn1.mode = 1
inputs = np.arange(0, nsteps+1, 1.0)
x0 = moose.StimulusTable('/x0')
x0.vector = inputs
x0.startTime = 0.0
x0.stopTime = simtime
x0.stepPosition = 0.0
inputs /= 10
x1 = moose.StimulusTable('/x1')
x1.vector = inputs
x1.startTime = 0.0
x1.stopTime = simtime
x1.stepPosition = 0.0
inputs /= 10
y0 = moose.StimulusTable('/y0')
y0.vector = inputs
y0.startTime = 0.0
y0.stopTime = simtime
y0.stepPosition = 0.0
inputs /= 10
y1 = moose.StimulusTable('/y1')
y1.vector = inputs
    y1.startTime = 0.0
y1.stopTime = simtime
y1.stepPosition = 0.0
moose.connect(x0, 'output', fn1.x[0], 'input')
moose.connect(x1, 'output', fn1.x[1], 'input')
moose.connect(fn1, 'requestOut', y0, 'getOutputValue')
moose.connect(fn1, 'requestOut', y1, 'getOutputValue')
z1 = moose.Table('/z1')
moose.connect(z1, 'requestOut', fn1, 'getValue')
for ii in range(32):
moose.setClock(ii, dt)
moose.reinit()
moose.start(simtime)
for ii in range(len(z1.vector)):
print(ii, z1.vector[ii])
if __name__ == '__main__':
test_var_order()
#
# test_function.py ends here
| gpl-3.0 | 6,557,834,612,652,690,000 | 23.8 | 67 | 0.631598 | false |
remybaranx/qtaste | demo/Testbeds/ControlScripts/controlscript_addon.py | 1 | 2642 | ##
# Control script Addon jython module.
#
# This module contains extensions of the ControlScript class:
# - VirtualBox: this extension class is to be used to control Sun VirtualBox images.
##
from controlscript import *
import time
import traceback
class ControlScriptAddon(ControlScript):
""" Control script Addon"""
def __init__(self, controlActions):
"""
Initialize ControlScript object.
Store controlActions in self.controlActions,
store additional command-line arguments (arguments except first one) in self.arguments,
store TESTBED environment variable in self.testbed,
and execute start() or stop() following the value of the first command-line argument (must be 'start' or 'stop')
@param controlActions sequence of ControlAction (list or tuple)
"""
ControlScript.__init__(self, controlActions)
class VirtualBox(ControlAction):
""" Control script action for starting/stopping a Virtual Box image """
def __init__(self, description, nameOfVBoxImage, active=True):
"""
Initialize VirtualBox object
@param description control script action description, also used as window title
@param nameOfVBoxImage the sun virtual box image id to be started
        @param active whether this control action is enabled (defaults to True)
"""
ControlAction.__init__(self, description, active)
self.callerScript = traceback.format_stack()[0].split("\"")[1]
self.nameOfVBoxImage = nameOfVBoxImage
def dumpDataType(self, prefix, writer):
""" Method called on start. It dumps the data type. to be overridden by subclasses """
super(VirtualBox, self).dumpDataType(prefix, writer)
writer.write(prefix + ".nameOfVBoxImage=string\n")
def dump(self, writer):
""" Method called on start. It dump the control action parameter in the writer, to be overridden by subclasses """
super(VirtualBox, self).dump(writer)
writer.write(str(self.caID) + ".nameOfVBoxImage=\"" + str(self.nameOfVBoxImage) + "\"\n")
def start(self):
# the VBoxManage command has to be in the PATH ...
commandArguments = ['VBoxManage','startvm',self.nameOfVBoxImage]
print "Starting " + self.description + "..."
print commandArguments;
self.executeCommand(commandArguments);
time.sleep(30)
print
def stop(self):
commandArguments = ['VBoxManage', 'controlvm', self.nameOfVBoxImage, 'poweroff']
print "Stopping " + self.description + "..."
print commandArguments;
self.executeCommand(commandArguments);
commandArguments = ['VBoxManage', 'snapshot', self.nameOfVBoxImage, 'restorecurrent']
self.executeCommand(commandArguments);
print
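
# A minimal usage sketch (hedged; the VM name is hypothetical). The
# ControlScript base class reads 'start' or 'stop' from the command line
# and runs the listed control actions accordingly:
#
# ControlScriptAddon([
#     VirtualBox("Test VM", "my-vbox-image"),
# ])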
| gpl-3.0 | 6,501,847,338,255,054,000 | 40.28125 | 116 | 0.745647 | false |
colbyga/pychess | lib/pychess/Utils/Piece.py | 22 | 1197 | from pychess.Utils.const import KING, QUEEN, ROOK, BISHOP, KNIGHT, PAWN
from pychess.Utils.repr import reprSign, reprColor, reprPiece
class Piece:
def __init__ (self, color, piece, captured=False):
self.color = color
self.piece = piece
self.captured = captured
# in crazyhouse we need to know this for later captures
self.promoted = False
self.opacity = 1.0
self.x = None
self.y = None
# Sign is a deprecated synonym for piece
def _set_sign (self, sign):
self.piece = sign
def _get_sign (self):
return self.piece
sign = property(_get_sign, _set_sign)
def __repr__ (self):
represen = "<%s %s" % (reprColor[self.color], reprPiece[self.piece])
if self.opacity != 1.0:
represen += " Op:%0.1f" % self.opacity
if self.x != None or self.y != None:
if self.x != None:
represen += " X:%0.1f" % self.x
else: represen += " X:None"
if self.y != None:
represen += " Y:%0.1f" % self.y
else: represen += " Y:None"
represen += ">"
return represen
| gpl-3.0 | 4,497,102,088,175,651,300 | 31.351351 | 76 | 0.53467 | false |
mmt/deeprl22 | hw2/frozen_lake.py | 7 | 4392 | import numpy as np
import sys
from six import StringIO, b
from gym import utils
import discrete_env
LEFT = 0
DOWN = 1
RIGHT = 2
UP = 3
MAPS = {
"4x4": [
"SFFF",
"FHFH",
"FFFH",
"HFFG"
],
"8x8": [
"SFFFFFFF",
"FFFFFFFF",
"FFFHFFFF",
"FFFFFHFF",
"FFFHFFFF",
"FHHFFFHF",
"FHFFHFHF",
"FFFHFFFG"
],
}
class FrozenLakeEnv(discrete_env.DiscreteEnv):
"""
Winter is here. You and your friends were tossing around a frisbee at the park
when you made a wild throw that left the frisbee out in the middle of the lake.
The water is mostly frozen, but there are a few holes where the ice has melted.
If you step into one of those holes, you'll fall into the freezing water.
At this time, there's an international frisbee shortage, so it's absolutely imperative that
you navigate across the lake and retrieve the disc.
However, the ice is slippery, so you won't always move in the direction you intend.
The surface is described using a grid like the following
SFFF
FHFH
FFFH
HFFG
S : starting point, safe
F : frozen surface, safe
H : hole, fall to your doom
G : goal, where the frisbee is located
The episode ends when you reach the goal or fall in a hole.
You receive a reward of 1 if you reach the goal, and zero otherwise.
"""
metadata = {'render.modes': ['human', 'ansi']}
def __init__(self, desc=None, map_name="4x4",is_slippery=True):
if desc is None and map_name is None:
raise ValueError('Must provide either desc or map_name')
elif desc is None:
desc = MAPS[map_name]
self.desc = desc = np.asarray(desc,dtype='c')
self.nrow, self.ncol = nrow, ncol = desc.shape
nA = 4
nS = nrow * ncol
isd = np.array(desc == b'S').astype('float64').ravel()
isd /= isd.sum()
P = {s : {a : [] for a in range(nA)} for s in range(nS)}
def to_s(row, col):
return row*ncol + col
def inc(row, col, a):
if a==0: # left
col = max(col-1,0)
elif a==1: # down
row = min(row+1,nrow-1)
elif a==2: # right
col = min(col+1,ncol-1)
elif a==3: # up
row = max(row-1,0)
return (row, col)
for row in range(nrow):
for col in range(ncol):
s = to_s(row, col)
for a in range(4):
li = P[s][a]
letter = desc[row, col]
if letter in b'GH':
li.append((1.0, s, 0, True))
else:
if is_slippery:
for b in [(a-1)%4, a, (a+1)%4]:
newrow, newcol = inc(row, col, b)
newstate = to_s(newrow, newcol)
newletter = desc[newrow, newcol]
done = bytes(newletter) in b'GH'
rew = float(newletter == b'G')
li.append((0.8 if b==a else 0.1, newstate, rew, done))
else:
newrow, newcol = inc(row, col, a)
newstate = to_s(newrow, newcol)
newletter = desc[newrow, newcol]
done = bytes(newletter) in b'GH'
rew = float(newletter == b'G')
li.append((1.0, newstate, rew, done))
super(FrozenLakeEnv, self).__init__(nS, nA, P, isd)
def _render(self, mode='human', close=False):
if close:
return
outfile = StringIO() if mode == 'ansi' else sys.stdout
row, col = self.s // self.ncol, self.s % self.ncol
desc = self.desc.tolist()
desc = [[c.decode('utf-8') for c in line] for line in desc]
desc[row][col] = utils.colorize(desc[row][col], "red", highlight=True)
if self.lastaction is not None:
outfile.write(" ({})\n".format(["Left","Down","Right","Up"][self.lastaction]))
else:
outfile.write("\n")
outfile.write("\n".join(''.join(line) for line in desc)+"\n")
return outfile
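
# A minimal usage sketch (hedged): a short random rollout on the 4x4 map,
# assuming the gym-style reset()/step() API inherited from
# discrete_env.DiscreteEnv.
#
# if __name__ == '__main__':
#     env = FrozenLakeEnv(map_name="4x4", is_slippery=True)
#     env.reset()
#     for _ in range(10):
#         _, rew, done, _ = env.step(np.random.randint(4))
#         env._render()
#         if done:
#             break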
| mit | 4,770,754,171,500,952,000 | 32.526718 | 95 | 0.497495 | false |
felipenaselva/felipe.repository | plugin.video.mrpiracy/resources/lib/AADecoder.py | 14 | 8517 | # -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector for openload.io
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
# by DrZ3r0
# ------------------------------------------------------------
# Modified by Shani
import re
class AADecoder(object):
def __init__(self, aa_encoded_data):
self.encoded_str = aa_encoded_data.replace('/*´∇`*/', '')
self.b = ["(c^_^o)", "(゚Θ゚)", "((o^_^o) - (゚Θ゚))", "(o^_^o)",
"(゚ー゚)", "((゚ー゚) + (゚Θ゚))", "((o^_^o) +(o^_^o))", "((゚ー゚) + (o^_^o))",
"((゚ー゚) + (゚ー゚))", "((゚ー゚) + (゚ー゚) + (゚Θ゚))", "(゚Д゚) .゚ω゚ノ", "(゚Д゚) .゚Θ゚ノ",
"(゚Д゚) ['c']", "(゚Д゚) .゚ー゚ノ", "(゚Д゚) .゚Д゚ノ", "(゚Д゚) [゚Θ゚]"]
def is_aaencoded(self):
idx = self.encoded_str.find("゚ω゚ノ= /`m´)ノ ~┻━┻ //*´∇`*/ ['_']; o=(゚ー゚) =_=3; c=(゚Θ゚) =(゚ー゚)-(゚ー゚); ")
if idx == -1:
return False
is_encoded = self.encoded_str.find("(゚Д゚)[゚o゚]) (゚Θ゚)) ('_');", idx) != -1
return is_encoded
def base_repr(self, number, base=2, padding=0):
digits = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
if base > len(digits):
base = len(digits)
num = abs(number)
res = []
while num:
res.append(digits[num % base])
num //= base
if padding:
res.append('0' * padding)
if number < 0:
res.append('-')
return ''.join(reversed(res or '0'))
def decode_char(self, enc_char, radix):
end_char = "+ "
str_char = ""
while enc_char != '':
found = False
# for i in range(len(self.b)):
# print self.b[i], enc_char.find(self.b[i])
# if enc_char.find(self.b[i]) == 0:
# str_char += self.base_repr(i, radix)
# enc_char = enc_char[len(self.b[i]):]
# found = True
# break
# print 'found', found, enc_char
if not found:
for i in range(len(self.b)):
enc_char = enc_char.replace(self.b[i], str(i))
# enc_char = enc_char.replace('(゚Θ゚)', '1').replace('(゚ー゚)', '4').replace('(c^_^o)', '0').replace('(o^_^o)', '3')
# print 'enc_char', enc_char
startpos = 0
findClose = True
balance = 1
result = []
if enc_char.startswith('('):
l = 0
for t in enc_char[1:]:
l += 1
# print 'looping', findClose, startpos, t, balance
if findClose and t == ')':
balance -= 1
if balance == 0:
result += [enc_char[startpos:l + 1]]
findClose = False
continue
elif not findClose and t == '(':
startpos = l
findClose = True
balance = 1
continue
elif t == '(':
balance += 1
if result is None or len(result) == 0:
return ""
else:
for r in result:
value = self.decode_digit(r, radix)
# print 'va', value
str_char += value
if value == "":
return ""
return str_char
enc_char = enc_char[len(end_char):]
return str_char
def parseJSString(self, s):
try:
# print s
# offset = 1 if s[0] == '+' else 0
tmp = (s.replace('!+[]', '1').replace('!![]', '1').replace('[]', '0')) # .replace('(','str(')[offset:])
val = int(eval(tmp))
return val
except:
pass
def decode_digit(self, enc_int, radix):
# enc_int = enc_int.replace('(゚Θ゚)', '1').replace('(゚ー゚)', '4').replace('(c^_^o)', '0').replace('(o^_^o)', '3')
# print 'enc_int before', enc_int
# for i in range(len(self.b)):
# print self.b[i], enc_char.find(self.b[i])
# if enc_char.find(self.b[i]) > 0:
# str_char += self.base_repr(i, radix)
# enc_char = enc_char[len(self.b[i]):]
# found = True
# break
# enc_int=enc_int.replace(self.b[i], str(i))
# print 'enc_int before', enc_int
try:
return str(eval(enc_int))
except: pass
rr = '(\(.+?\)\))\+'
rerr = enc_int.split('))+') # re.findall(rr, enc_int)
v = ""
# print rerr
for c in rerr:
if len(c) > 0:
# print 'v', c
if c.strip().endswith('+'):
c = c.strip()[:-1]
# print 'v', c
startbrackets = len(c) - len(c.replace('(', ''))
endbrackets = len(c) - len(c.replace(')', ''))
if startbrackets > endbrackets:
c += ')' * (startbrackets - endbrackets)
if '[' in c:
v += str(self.parseJSString(c))
else:
# print c
v += str(eval(c))
return v
# unreachable code
# mode 0=+, 1=-
# mode = 0
# value = 0
# while enc_int != '':
# found = False
# for i in range(len(self.b)):
# if enc_int.find(self.b[i]) == 0:
# if mode == 0:
# value += i
# else:
# value -= i
# enc_int = enc_int[len(self.b[i]):]
# found = True
# break
# if not found:
# return ""
# enc_int = re.sub('^\s+|\s+$', '', enc_int)
# if enc_int.find("+") == 0:
# mode = 0
# else:
# mode = 1
# enc_int = enc_int[1:]
# enc_int = re.sub('^\s+|\s+$', '', enc_int)
# return self.base_repr(value, radix)
def decode(self):
self.encoded_str = re.sub('^\s+|\s+$', '', self.encoded_str)
# get data
pattern = (r"\(゚Д゚\)\[゚o゚\]\+ (.+?)\(゚Д゚\)\[゚o゚\]\)")
result = re.search(pattern, self.encoded_str, re.DOTALL)
if result is None:
print "AADecoder: data not found"
return False
data = result.group(1)
# hex decode string
begin_char = "(゚Д゚)[゚ε゚]+"
alt_char = "(o゚ー゚o)+ "
out = ''
# print data
while data != '':
# Check new char
if data.find(begin_char) != 0:
print "AADecoder: data not found"
return False
data = data[len(begin_char):]
# Find encoded char
enc_char = ""
if data.find(begin_char) == -1:
enc_char = data
data = ""
else:
enc_char = data[:data.find(begin_char)]
data = data[len(enc_char):]
radix = 8
# Detect radix 16 for utf8 char
if enc_char.find(alt_char) == 0:
enc_char = enc_char[len(alt_char):]
radix = 16
# print repr(enc_char), radix
# print enc_char.replace('(゚Θ゚)', '1').replace('(゚ー゚)', '4').replace('(c^_^o)', '0').replace('(o^_^o)', '3')
# print 'The CHAR', enc_char, radix
str_char = self.decode_char(enc_char, radix)
if str_char == "":
print "no match : "
print data + "\nout = " + out + "\n"
return False
# print 'sofar', str_char, radix,out
out += chr(int(str_char, radix))
# print 'sfar', chr(int(str_char, radix)), out
if out == "":
print "no match : " + data
return False
return out
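
# A minimal usage sketch (hedged): feed the raw aaencoded JavaScript blob in;
# decode() returns the plain JavaScript string, or False when parsing fails.
#
# dec = AADecoder(encoded_js)   # encoded_js is a hypothetical input string
# if dec.is_aaencoded():
#     plain_js = dec.decode()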
| gpl-2.0 | 8,912,127,611,122,123,000 | 32.971193 | 129 | 0.389582 | false |
pedrobaeza/project-service | project_baseuser/__openerp__.py | 2 | 3592 | # -*- coding: utf-8 -*-
##############################################################################
#
# Daniel Reis, 2013
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Projects extensions for user roles',
'version': '1.0',
'category': 'Project Management',
'summary': 'Extend Project user roles to support more complex use cases',
'description': """\
Employees are now basic Project users, able to create new documents (Issues
or Tasks). These are kept editable while in New and Cancelled states, to
allow for corrections or for the user himself to cancel an incorrectly
created request.
Previously, Employee users did not have any write or create access to project
documents.
Project Users, on the other hand, are supposed to act on these documents,
such as reported issues, and update them accordingly, so they have write
access for all states. Employee users don't have write access on later states,
but can still write comments and communicate through the message board (open
chatter).
In general, users will only be able to see documents where:
* They are assigned/responsible for, or
* They are following, or
* They are a team member for the corresponding Project (but not if only in
the project's follower list).
Project Managers have access rules similar to Project Users, but additionally
can create new projects and can see all documents for the projects they are
the Manager.
As a consequence, Project Managers no longer have inconditional access to all
Tasks and Issues, and will only be able to edit the definitions of Projects
they manage.
This makes it possible for a Project Manager to have private projects that
other users, Project Managers included, will not be able to see. They will
need to be added as followers or team members to be able to see it.
Public Projects and their documents are still visible to everyone.
Portal users access rules are kept unchanged.
---------------------
Access Rules summary:
---------------------
Employee Users
Can see only documents they follow or are responsible for (in "user_id").
Can create new documents and edit them while in "New"/"Cancelled" states.
Project Users
Can edit Project Issues and Tasks in any stage/state.
Can see all documents for projects they are followers or team members of.
Can see only documents followed or assigned to for other projects.
Project Managers
Can create new projects and edit their attributes.
Can see all documents (Tasks or Issues) but only for their managed
projects.
For the other projects, they will see only followed documents, just like
the other users.
""",
'author': 'Daniel Reis',
'depends': [
'project',
],
'data': [
'project_view.xml',
'security/ir.model.access.csv',
'security/project_security.xml',
],
'installable': True,
}
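
# For illustration only (a hedged sketch, not part of the module): the
# "assigned, followed, or team member" visibility described above is the kind
# of policy an OpenERP record rule expresses; its domain would look roughly
# like this (standard project.task field names assumed):
#
# ['|', '|',
#  ('user_id', '=', user.id),
#  ('message_follower_ids', 'in', [user.partner_id.id]),
#  ('project_id.members', 'in', [user.id])]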
| agpl-3.0 | -2,656,036,770,505,278,000 | 37.212766 | 79 | 0.697105 | false |
40123237/w17test | static/Brython3.1.0-20150301-090019/Lib/unittest/test/test_setups.py | 791 | 16440 | import io
import sys
import unittest
def resultFactory(*_):
return unittest.TestResult()
class TestSetups(unittest.TestCase):
def getRunner(self):
return unittest.TextTestRunner(resultclass=resultFactory,
stream=io.StringIO())
def runTests(self, *cases):
suite = unittest.TestSuite()
for case in cases:
tests = unittest.defaultTestLoader.loadTestsFromTestCase(case)
suite.addTests(tests)
runner = self.getRunner()
# creating a nested suite exposes some potential bugs
realSuite = unittest.TestSuite()
realSuite.addTest(suite)
# adding empty suites to the end exposes potential bugs
suite.addTest(unittest.TestSuite())
realSuite.addTest(unittest.TestSuite())
return runner.run(realSuite)
def test_setup_class(self):
class Test(unittest.TestCase):
setUpCalled = 0
@classmethod
def setUpClass(cls):
Test.setUpCalled += 1
unittest.TestCase.setUpClass()
def test_one(self):
pass
def test_two(self):
pass
result = self.runTests(Test)
self.assertEqual(Test.setUpCalled, 1)
self.assertEqual(result.testsRun, 2)
self.assertEqual(len(result.errors), 0)
def test_teardown_class(self):
class Test(unittest.TestCase):
tearDownCalled = 0
@classmethod
def tearDownClass(cls):
Test.tearDownCalled += 1
unittest.TestCase.tearDownClass()
def test_one(self):
pass
def test_two(self):
pass
result = self.runTests(Test)
self.assertEqual(Test.tearDownCalled, 1)
self.assertEqual(result.testsRun, 2)
self.assertEqual(len(result.errors), 0)
def test_teardown_class_two_classes(self):
class Test(unittest.TestCase):
tearDownCalled = 0
@classmethod
def tearDownClass(cls):
Test.tearDownCalled += 1
unittest.TestCase.tearDownClass()
def test_one(self):
pass
def test_two(self):
pass
class Test2(unittest.TestCase):
tearDownCalled = 0
@classmethod
def tearDownClass(cls):
Test2.tearDownCalled += 1
unittest.TestCase.tearDownClass()
def test_one(self):
pass
def test_two(self):
pass
result = self.runTests(Test, Test2)
self.assertEqual(Test.tearDownCalled, 1)
self.assertEqual(Test2.tearDownCalled, 1)
self.assertEqual(result.testsRun, 4)
self.assertEqual(len(result.errors), 0)
def test_error_in_setupclass(self):
class BrokenTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
raise TypeError('foo')
def test_one(self):
pass
def test_two(self):
pass
result = self.runTests(BrokenTest)
self.assertEqual(result.testsRun, 0)
self.assertEqual(len(result.errors), 1)
error, _ = result.errors[0]
self.assertEqual(str(error),
'setUpClass (%s.BrokenTest)' % __name__)
def test_error_in_teardown_class(self):
class Test(unittest.TestCase):
tornDown = 0
@classmethod
def tearDownClass(cls):
Test.tornDown += 1
raise TypeError('foo')
def test_one(self):
pass
def test_two(self):
pass
class Test2(unittest.TestCase):
tornDown = 0
@classmethod
def tearDownClass(cls):
Test2.tornDown += 1
raise TypeError('foo')
def test_one(self):
pass
def test_two(self):
pass
result = self.runTests(Test, Test2)
self.assertEqual(result.testsRun, 4)
self.assertEqual(len(result.errors), 2)
self.assertEqual(Test.tornDown, 1)
self.assertEqual(Test2.tornDown, 1)
error, _ = result.errors[0]
self.assertEqual(str(error),
'tearDownClass (%s.Test)' % __name__)
def test_class_not_torndown_when_setup_fails(self):
class Test(unittest.TestCase):
tornDown = False
@classmethod
def setUpClass(cls):
raise TypeError
@classmethod
def tearDownClass(cls):
Test.tornDown = True
raise TypeError('foo')
def test_one(self):
pass
self.runTests(Test)
self.assertFalse(Test.tornDown)
def test_class_not_setup_or_torndown_when_skipped(self):
class Test(unittest.TestCase):
classSetUp = False
tornDown = False
@classmethod
def setUpClass(cls):
Test.classSetUp = True
@classmethod
def tearDownClass(cls):
Test.tornDown = True
def test_one(self):
pass
Test = unittest.skip("hop")(Test)
self.runTests(Test)
self.assertFalse(Test.classSetUp)
self.assertFalse(Test.tornDown)
def test_setup_teardown_order_with_pathological_suite(self):
results = []
class Module1(object):
@staticmethod
def setUpModule():
results.append('Module1.setUpModule')
@staticmethod
def tearDownModule():
results.append('Module1.tearDownModule')
class Module2(object):
@staticmethod
def setUpModule():
results.append('Module2.setUpModule')
@staticmethod
def tearDownModule():
results.append('Module2.tearDownModule')
class Test1(unittest.TestCase):
@classmethod
def setUpClass(cls):
results.append('setup 1')
@classmethod
def tearDownClass(cls):
results.append('teardown 1')
def testOne(self):
results.append('Test1.testOne')
def testTwo(self):
results.append('Test1.testTwo')
class Test2(unittest.TestCase):
@classmethod
def setUpClass(cls):
results.append('setup 2')
@classmethod
def tearDownClass(cls):
results.append('teardown 2')
def testOne(self):
results.append('Test2.testOne')
def testTwo(self):
results.append('Test2.testTwo')
class Test3(unittest.TestCase):
@classmethod
def setUpClass(cls):
results.append('setup 3')
@classmethod
def tearDownClass(cls):
results.append('teardown 3')
def testOne(self):
results.append('Test3.testOne')
def testTwo(self):
results.append('Test3.testTwo')
Test1.__module__ = Test2.__module__ = 'Module'
Test3.__module__ = 'Module2'
sys.modules['Module'] = Module1
sys.modules['Module2'] = Module2
first = unittest.TestSuite((Test1('testOne'),))
second = unittest.TestSuite((Test1('testTwo'),))
third = unittest.TestSuite((Test2('testOne'),))
fourth = unittest.TestSuite((Test2('testTwo'),))
fifth = unittest.TestSuite((Test3('testOne'),))
sixth = unittest.TestSuite((Test3('testTwo'),))
suite = unittest.TestSuite((first, second, third, fourth, fifth, sixth))
runner = self.getRunner()
result = runner.run(suite)
self.assertEqual(result.testsRun, 6)
self.assertEqual(len(result.errors), 0)
self.assertEqual(results,
['Module1.setUpModule', 'setup 1',
'Test1.testOne', 'Test1.testTwo', 'teardown 1',
'setup 2', 'Test2.testOne', 'Test2.testTwo',
'teardown 2', 'Module1.tearDownModule',
'Module2.setUpModule', 'setup 3',
'Test3.testOne', 'Test3.testTwo',
'teardown 3', 'Module2.tearDownModule'])
def test_setup_module(self):
class Module(object):
moduleSetup = 0
@staticmethod
def setUpModule():
Module.moduleSetup += 1
class Test(unittest.TestCase):
def test_one(self):
pass
def test_two(self):
pass
Test.__module__ = 'Module'
sys.modules['Module'] = Module
result = self.runTests(Test)
self.assertEqual(Module.moduleSetup, 1)
self.assertEqual(result.testsRun, 2)
self.assertEqual(len(result.errors), 0)
def test_error_in_setup_module(self):
class Module(object):
moduleSetup = 0
moduleTornDown = 0
@staticmethod
def setUpModule():
Module.moduleSetup += 1
raise TypeError('foo')
@staticmethod
def tearDownModule():
Module.moduleTornDown += 1
class Test(unittest.TestCase):
classSetUp = False
classTornDown = False
@classmethod
def setUpClass(cls):
Test.classSetUp = True
@classmethod
def tearDownClass(cls):
Test.classTornDown = True
def test_one(self):
pass
def test_two(self):
pass
class Test2(unittest.TestCase):
def test_one(self):
pass
def test_two(self):
pass
Test.__module__ = 'Module'
Test2.__module__ = 'Module'
sys.modules['Module'] = Module
result = self.runTests(Test, Test2)
self.assertEqual(Module.moduleSetup, 1)
self.assertEqual(Module.moduleTornDown, 0)
self.assertEqual(result.testsRun, 0)
self.assertFalse(Test.classSetUp)
self.assertFalse(Test.classTornDown)
self.assertEqual(len(result.errors), 1)
error, _ = result.errors[0]
self.assertEqual(str(error), 'setUpModule (Module)')
def test_testcase_with_missing_module(self):
class Test(unittest.TestCase):
def test_one(self):
pass
def test_two(self):
pass
Test.__module__ = 'Module'
sys.modules.pop('Module', None)
result = self.runTests(Test)
self.assertEqual(result.testsRun, 2)
def test_teardown_module(self):
class Module(object):
moduleTornDown = 0
@staticmethod
def tearDownModule():
Module.moduleTornDown += 1
class Test(unittest.TestCase):
def test_one(self):
pass
def test_two(self):
pass
Test.__module__ = 'Module'
sys.modules['Module'] = Module
result = self.runTests(Test)
self.assertEqual(Module.moduleTornDown, 1)
self.assertEqual(result.testsRun, 2)
self.assertEqual(len(result.errors), 0)
def test_error_in_teardown_module(self):
class Module(object):
moduleTornDown = 0
@staticmethod
def tearDownModule():
Module.moduleTornDown += 1
raise TypeError('foo')
class Test(unittest.TestCase):
classSetUp = False
classTornDown = False
@classmethod
def setUpClass(cls):
Test.classSetUp = True
@classmethod
def tearDownClass(cls):
Test.classTornDown = True
def test_one(self):
pass
def test_two(self):
pass
class Test2(unittest.TestCase):
def test_one(self):
pass
def test_two(self):
pass
Test.__module__ = 'Module'
Test2.__module__ = 'Module'
sys.modules['Module'] = Module
result = self.runTests(Test, Test2)
self.assertEqual(Module.moduleTornDown, 1)
self.assertEqual(result.testsRun, 4)
self.assertTrue(Test.classSetUp)
self.assertTrue(Test.classTornDown)
self.assertEqual(len(result.errors), 1)
error, _ = result.errors[0]
self.assertEqual(str(error), 'tearDownModule (Module)')
def test_skiptest_in_setupclass(self):
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
raise unittest.SkipTest('foo')
def test_one(self):
pass
def test_two(self):
pass
result = self.runTests(Test)
self.assertEqual(result.testsRun, 0)
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.skipped), 1)
skipped = result.skipped[0][0]
self.assertEqual(str(skipped), 'setUpClass (%s.Test)' % __name__)
def test_skiptest_in_setupmodule(self):
class Test(unittest.TestCase):
def test_one(self):
pass
def test_two(self):
pass
class Module(object):
@staticmethod
def setUpModule():
raise unittest.SkipTest('foo')
Test.__module__ = 'Module'
sys.modules['Module'] = Module
result = self.runTests(Test)
self.assertEqual(result.testsRun, 0)
self.assertEqual(len(result.errors), 0)
self.assertEqual(len(result.skipped), 1)
skipped = result.skipped[0][0]
self.assertEqual(str(skipped), 'setUpModule (Module)')
def test_suite_debug_executes_setups_and_teardowns(self):
ordering = []
class Module(object):
@staticmethod
def setUpModule():
ordering.append('setUpModule')
@staticmethod
def tearDownModule():
ordering.append('tearDownModule')
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
ordering.append('setUpClass')
@classmethod
def tearDownClass(cls):
ordering.append('tearDownClass')
def test_something(self):
ordering.append('test_something')
Test.__module__ = 'Module'
sys.modules['Module'] = Module
suite = unittest.defaultTestLoader.loadTestsFromTestCase(Test)
suite.debug()
expectedOrder = ['setUpModule', 'setUpClass', 'test_something', 'tearDownClass', 'tearDownModule']
self.assertEqual(ordering, expectedOrder)
def test_suite_debug_propagates_exceptions(self):
class Module(object):
@staticmethod
def setUpModule():
if phase == 0:
raise Exception('setUpModule')
@staticmethod
def tearDownModule():
if phase == 1:
raise Exception('tearDownModule')
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
if phase == 2:
raise Exception('setUpClass')
@classmethod
def tearDownClass(cls):
if phase == 3:
raise Exception('tearDownClass')
def test_something(self):
if phase == 4:
raise Exception('test_something')
Test.__module__ = 'Module'
sys.modules['Module'] = Module
_suite = unittest.defaultTestLoader.loadTestsFromTestCase(Test)
suite = unittest.TestSuite()
suite.addTest(_suite)
messages = ('setUpModule', 'tearDownModule', 'setUpClass', 'tearDownClass', 'test_something')
for phase, msg in enumerate(messages):
with self.assertRaisesRegex(Exception, msg):
suite.debug()
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -7,169,274,866,471,545,000 | 31.426036 | 106 | 0.535949 | false |
tpaszkowski/quantum | quantum/openstack/common/uuidutils.py | 159 | 1106 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
UUID related utilities and helper functions.
"""
import uuid
def generate_uuid():
return str(uuid.uuid4())
def is_uuid_like(val):
"""Returns validation of a value as a UUID.
For our purposes, a UUID is a canonical form string:
aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa
"""
try:
return str(uuid.UUID(val)) == val
except (TypeError, ValueError, AttributeError):
return False
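
# A small usage sketch (hedged):
#
# uid = generate_uuid()             # random canonical-form string each call
# assert is_uuid_like(uid)
# assert not is_uuid_like('not-a-uuid')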
| apache-2.0 | -7,504,865,618,342,483 | 27.358974 | 78 | 0.700723 | false |
Urvik08/ns3-gpcr | src/bridge/bindings/modulegen__gcc_ILP32.py | 28 | 168219 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.bridge', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## bridge-helper.h (module 'bridge'): ns3::BridgeHelper [class]
module.add_class('BridgeHelper')
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
module.add_class('NetDeviceContainer', import_from_module='ns.network')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## channel.h (module 'network'): ns3::Channel [class]
module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## nstime.h (module 'core'): ns3::TimeChecker [class]
module.add_class('TimeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## bridge-channel.h (module 'bridge'): ns3::BridgeChannel [class]
module.add_class('BridgeChannel', parent=root_module['ns3::Channel'])
## bridge-net-device.h (module 'bridge'): ns3::BridgeNetDevice [class]
module.add_class('BridgeNetDevice', parent=root_module['ns3::NetDevice'])
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3BridgeHelper_methods(root_module, root_module['ns3::BridgeHelper'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3Time_methods(root_module, root_module['ns3::Time'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3Channel_methods(root_module, root_module['ns3::Channel'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
register_Ns3TimeChecker_methods(root_module, root_module['ns3::TimeChecker'])
register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
register_Ns3BridgeChannel_methods(root_module, root_module['ns3::BridgeChannel'])
register_Ns3BridgeNetDevice_methods(root_module, root_module['ns3::BridgeNetDevice'])
return
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## address.h (module 'network'): ns3::Address::Address() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
cls.add_constructor([param('ns3::Address const &', 'address')])
## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
cls.add_method('CheckCompatible',
'bool',
[param('uint8_t', 'type'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyAllFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
cls.add_method('CopyAllTo',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'uint32_t',
[param('uint8_t *', 'buffer')],
is_const=True)
## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buffer')])
## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
cls.add_method('GetLength',
'uint8_t',
[],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
cls.add_method('IsInvalid',
'bool',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
cls.add_method('IsMatchingType',
'bool',
[param('uint8_t', 'type')],
is_const=True)
## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
cls.add_method('Register',
'uint8_t',
[],
is_static=True)
## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buffer')],
is_const=True)
return
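# A minimal usage sketch of the ns3::Address bindings registered above,
# assuming the compiled ns-3 Python wrapper is importable as ns.network
# (the module name follows the standard ns-3 build; it is not defined
# in this file):
#
#     import ns.network
#     addr = ns.network.Address()      # default-constructed
#     assert addr.IsInvalid()          # no type/payload set yet
#     n = addr.GetSerializedSize()     # bytes Serialize() would need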
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3BridgeHelper_methods(root_module, cls):
## bridge-helper.h (module 'bridge'): ns3::BridgeHelper::BridgeHelper(ns3::BridgeHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BridgeHelper const &', 'arg0')])
## bridge-helper.h (module 'bridge'): ns3::BridgeHelper::BridgeHelper() [constructor]
cls.add_constructor([])
## bridge-helper.h (module 'bridge'): ns3::NetDeviceContainer ns3::BridgeHelper::Install(ns3::Ptr<ns3::Node> node, ns3::NetDeviceContainer c) [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::NetDeviceContainer', 'c')])
## bridge-helper.h (module 'bridge'): ns3::NetDeviceContainer ns3::BridgeHelper::Install(std::string nodeName, ns3::NetDeviceContainer c) [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('std::string', 'nodeName'), param('ns3::NetDeviceContainer', 'c')])
## bridge-helper.h (module 'bridge'): void ns3::BridgeHelper::SetDeviceAttribute(std::string n1, ns3::AttributeValue const & v1) [member function]
cls.add_method('SetDeviceAttribute',
'void',
[param('std::string', 'n1'), param('ns3::AttributeValue const &', 'v1')])
return
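# Usage sketch for the BridgeHelper bindings above. Hedged: this
# assumes the ns.bridge / ns.core Python modules are built, that
# 'node' and 'ports' come from the surrounding simulation script, and
# that 'Mtu' is an attribute of ns3::BridgeNetDevice (true in stock
# ns-3, but verify against your tree):
#
#     bridge = ns.bridge.BridgeHelper()
#     bridge.SetDeviceAttribute('Mtu', ns.core.UintegerValue(1500))
#     bridged = bridge.Install(node, ports)  # ports: NetDeviceContainer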
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3Ipv4Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
cls.add_constructor([param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('CombineMask',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv4Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv4Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('GetSubnetDirectedBroadcast',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Address const &', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
cls.add_method('IsLocalMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('IsSubnetDirectedBroadcast',
'bool',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
return
def register_Ns3Ipv4Mask_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
cls.add_constructor([param('uint32_t', 'mask')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
cls.add_constructor([param('char const *', 'mask')])
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
cls.add_method('GetInverse',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint16_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Mask', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'mask')])
return
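# Sketch combining the Ipv4Address and Ipv4Mask APIs registered above
# (illustrative values; assumes ns.network is importable):
#
#     a = ns.network.Ipv4Address('10.1.1.7')
#     m = ns.network.Ipv4Mask('255.255.255.0')
#     net = a.CombineMask(m)                   # -> 10.1.1.0
#     bc = a.GetSubnetDirectedBroadcast(m)     # -> 10.1.1.255
#     m.IsMatch(a, ns.network.Ipv4Address('10.1.1.9'))  # same subnet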
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
cls.add_constructor([param('uint8_t *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
cls.add_method('CombinePrefix',
'ns3::Ipv6Address',
[param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv6Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv6Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
cls.add_method('GetAllHostsMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
cls.add_method('GetAllNodesMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
cls.add_method('GetAllRoutersMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
cls.add_method('GetIpv4MappedAddress',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
cls.add_method('IsAllHostsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
cls.add_method('IsAllNodesMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
cls.add_method('IsAllRoutersMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
cls.add_method('IsAny',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Address const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() [member function]
cls.add_method('IsIpv4MappedAddress',
'bool',
[])
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
cls.add_method('IsLinkLocal',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
cls.add_method('IsLinkLocalMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
cls.add_method('IsLocalhost',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
cls.add_method('IsSolicitedMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function]
cls.add_method('MakeIpv4MappedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv4Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
cls.add_method('MakeSolicitedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv6Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
cls.add_method('Set',
'void',
[param('uint8_t *', 'address')])
return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
def register_Ns3Mac48Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(ns3::Mac48Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(char const * str) [constructor]
cls.add_constructor([param('char const *', 'str')])
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::Allocate() [member function]
cls.add_method('Allocate',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Mac48Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyFrom(uint8_t const * buffer) [member function]
cls.add_method('CopyFrom',
'void',
[param('uint8_t const *', 'buffer')])
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'void',
[param('uint8_t *', 'buffer')],
is_const=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv4Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv4Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv6Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv6Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast6Prefix() [member function]
cls.add_method('GetMulticast6Prefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticastPrefix() [member function]
cls.add_method('GetMulticastPrefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsGroup() const [member function]
cls.add_method('IsGroup',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): static bool ns3::Mac48Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
return
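# Sketch for the Mac48Address bindings above (assumes ns.network; the
# note on Allocate() reflects its documented behaviour of handing out
# sequential addresses from a process-wide counter):
#
#     mac = ns.network.Mac48Address('00:00:00:00:00:01')
#     auto_mac = ns.network.Mac48Address.Allocate()
#     mac.IsBroadcast()   # False for a unicast address like this one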
def register_Ns3NetDeviceContainer_methods(root_module, cls):
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'arg0')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer() [constructor]
cls.add_constructor([])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::Ptr<ns3::NetDevice> dev) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(std::string devName) [constructor]
cls.add_constructor([param('std::string', 'devName')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & a, ns3::NetDeviceContainer const & b) [constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'a'), param('ns3::NetDeviceContainer const &', 'b')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::NetDeviceContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NetDeviceContainer', 'other')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(std::string deviceName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'deviceName')])
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::NetDeviceContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True)
## net-device-container.h (module 'network'): uint32_t ns3::NetDeviceContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
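# NetDeviceContainer is the value most BridgeHelper users touch; a
# sketch of idiomatic iteration via the Get()/GetN() pair registered
# above ('devices' is assumed to come from an earlier Install() call):
#
#     for i in range(devices.GetN()):
#         dev = devices.Get(i)   # ns3::Ptr<ns3::NetDevice>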
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3ObjectFactory_methods(root_module, cls):
cls.add_output_stream_operator()
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
cls.add_constructor([param('std::string', 'typeId')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Object >',
[],
is_const=True)
## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('ns3::TypeId', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('char const *', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('std::string', 'tid')])
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
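# TypeId is ns-3's runtime type/attribute registry; a lookup sketch
# using the static and const methods bound above (assumes ns.core, and
# that the queried TypeId name exists in the linked modules):
#
#     tid = ns.core.TypeId.LookupByName('ns3::BridgeNetDevice')
#     for i in range(tid.GetAttributeN()):
#         print(tid.GetAttributeFullName(i))   # e.g. ns3::BridgeNetDevice::Mtu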
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Int64x64_t_methods(root_module, cls):
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_unary_numeric_operator('-')
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
cls.add_constructor([])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
cls.add_method('GetHigh',
'int64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
cls.add_method('GetLow',
'uint64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
cls.add_method('Invert',
'ns3::int64x64_t',
[param('uint64_t', 'v')],
is_static=True)
## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
cls.add_method('MulByInvert',
'void',
[param('ns3::int64x64_t const &', 'o')])
return
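# register_Ns3Object_methods wires up the ns3::Object base class: the public
# lifecycle API (AggregateObject, Dispose, Start, GetAggregateIterator) plus
# the protected virtuals (DoDispose, DoStart, NotifyNewAggregate), so Python
# subclasses can override the object lifecycle hooks.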
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Start() [member function]
cls.add_method('Start',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
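# The next five registrations instantiate the same
# SimpleRefCount<T, ns3::empty, ns3::DefaultDeleter<T>> template for
# T = AttributeAccessor, AttributeChecker, AttributeValue, CallbackImplBase
# and TraceSourceAccessor; each exposes only the default constructor, the
# copy constructor and the static Cleanup() hook.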
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
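# ns3::Time gets the full arithmetic/comparison operator set, constructors
# from every integral width, double, string and int64x64_t, and the unit
# accessors (GetSeconds, GetMilliSeconds, ..., GetFemtoSeconds).
# A hedged sketch of the resulting Python API (the ns.core import path is an
# assumption, not something this file establishes):
#   import ns.core
#   t = ns.core.Time("1500ms")     # string constructor registered below
#   t.GetMilliSeconds()            # -> 1500
#   t.GetSeconds()                 # -> 1.5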
def register_Ns3Time_methods(root_module, cls):
cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## nstime.h (module 'core'): ns3::Time::Time() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
cls.add_constructor([param('ns3::Time const &', 'o')])
## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
cls.add_constructor([param('std::string const &', 's')])
## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & value) [constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
cls.add_method('Compare',
'int',
[param('ns3::Time const &', 'o')],
is_const=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & from, ns3::Time::Unit timeUnit) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'value')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromDouble',
'ns3::Time',
[param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromInteger',
'ns3::Time',
[param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
cls.add_method('GetFemtoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
cls.add_method('GetInteger',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
cls.add_method('GetMicroSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
cls.add_method('GetMilliSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
cls.add_method('GetNanoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
cls.add_method('GetPicoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
cls.add_method('GetResolution',
'ns3::Time::Unit',
[],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
cls.add_method('GetSeconds',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
cls.add_method('GetTimeStep',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
cls.add_method('IsNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
cls.add_method('IsPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
cls.add_method('IsStrictlyNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
cls.add_method('IsStrictlyPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
cls.add_method('IsZero',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
cls.add_method('SetResolution',
'void',
[param('ns3::Time::Unit', 'resolution')],
is_static=True)
## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('To',
'ns3::int64x64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToDouble',
'double',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToInteger',
'int64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
return
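# TraceSourceAccessor is registered as an abstract interface: all four
# Connect/Disconnect variants are pure virtual, and transfer_ownership=False
# marks the ObjectBase* parameters as borrowed references rather than
# ownership transfers.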
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
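# The attribute system trio follows: AttributeAccessor (Get/Set against an
# ObjectBase), AttributeChecker (Check/Copy/Create plus type introspection)
# and AttributeValue (Copy and string (de)serialization). All of their
# methods are pure virtual, so these act as abstract bases for the concrete
# *Value/*Checker pairs registered further down.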
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
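# CallbackChecker/CallbackValue mirror the checker/value pattern for
# ns3::CallbackBase, and CallbackImplBase contributes the pure-virtual
# IsEqual used for callback identity comparison.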
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
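# ns3::Channel is an abstract base here as well: GetDevice and GetNDevices
# are pure virtual, while GetId and the static GetTypeId are concrete.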
def register_Ns3Channel_methods(root_module, cls):
## channel.h (module 'network'): ns3::Channel::Channel(ns3::Channel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Channel const &', 'arg0')])
## channel.h (module 'network'): ns3::Channel::Channel() [constructor]
cls.add_constructor([])
## channel.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Channel::GetDevice(uint32_t i) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## channel.h (module 'network'): uint32_t ns3::Channel::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## channel.h (module 'network'): uint32_t ns3::Channel::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## channel.h (module 'network'): static ns3::TypeId ns3::Channel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
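# From here through Mac48AddressValue, each address-like type registers the
# same Checker/Value pair: a trivial checker plus a value class with Get/Set
# and SerializeToString/DeserializeFromString. Hedged sketch of the resulting
# API (the ns.network module path is an assumption):
#   v = ns.network.Ipv4AddressValue(ns.network.Ipv4Address("10.0.0.1"))
#   v.Get()    # -> the wrapped Ipv4Address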
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Address const &', 'value')])
return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Mask const &', 'value')])
return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Address const &', 'value')])
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Prefix const &', 'value')])
return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker(ns3::Mac48AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')])
return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48Address const & value) [constructor]
cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
## mac48-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac48AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## mac48-address.h (module 'network'): bool ns3::Mac48AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## mac48-address.h (module 'network'): ns3::Mac48Address ns3::Mac48AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Mac48Address',
[],
is_const=True)
## mac48-address.h (module 'network'): std::string ns3::Mac48AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## mac48-address.h (module 'network'): void ns3::Mac48AddressValue::Set(ns3::Mac48Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Mac48Address const &', 'value')])
return
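# ns3::NetDevice is registered as a pure-virtual interface: every member
# below has is_pure_virtual=True, and the receive callbacks use ns3::Callback
# templates padded to nine parameters with ns3::empty.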
def register_Ns3NetDevice_methods(root_module, cls):
## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
cls.add_constructor([])
## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
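# More checker/value pairs follow for ObjectFactory, Time, TypeId and
# Address, continuing the pattern established above.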
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
cls.add_method('Get',
'ns3::ObjectFactory',
[],
is_const=True)
## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::ObjectFactory const &', 'value')])
return
def register_Ns3TimeChecker_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker(ns3::TimeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeChecker const &', 'arg0')])
return
def register_Ns3TimeValue_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
cls.add_constructor([param('ns3::Time const &', 'value')])
## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
cls.add_method('Get',
'ns3::Time',
[],
is_const=True)
## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Time const &', 'value')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3AddressChecker_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
return
def register_Ns3AddressValue_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
cls.add_constructor([param('ns3::Address const &', 'value')])
## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Address',
[],
is_const=True)
## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Address const &', 'value')])
return
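# Finally the bridge module classes: BridgeChannel aggregates the bridged
# channels behind the Channel interface, and BridgeNetDevice provides a
# concrete NetDevice implementation (is_virtual without is_pure_virtual),
# so these can be instantiated directly.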
def register_Ns3BridgeChannel_methods(root_module, cls):
## bridge-channel.h (module 'bridge'): static ns3::TypeId ns3::BridgeChannel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## bridge-channel.h (module 'bridge'): ns3::BridgeChannel::BridgeChannel() [constructor]
cls.add_constructor([])
## bridge-channel.h (module 'bridge'): void ns3::BridgeChannel::AddChannel(ns3::Ptr<ns3::Channel> bridgedChannel) [member function]
cls.add_method('AddChannel',
'void',
[param('ns3::Ptr< ns3::Channel >', 'bridgedChannel')])
## bridge-channel.h (module 'bridge'): uint32_t ns3::BridgeChannel::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True, is_virtual=True)
## bridge-channel.h (module 'bridge'): ns3::Ptr<ns3::NetDevice> ns3::BridgeChannel::GetDevice(uint32_t i) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True, is_virtual=True)
return
def register_Ns3BridgeNetDevice_methods(root_module, cls):
## bridge-net-device.h (module 'bridge'): ns3::BridgeNetDevice::BridgeNetDevice() [constructor]
cls.add_constructor([])
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::AddBridgePort(ns3::Ptr<ns3::NetDevice> bridgePort) [member function]
cls.add_method('AddBridgePort',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'bridgePort')])
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_virtual=True)
## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::NetDevice> ns3::BridgeNetDevice::GetBridgePort(uint32_t n) const [member function]
cls.add_method('GetBridgePort',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'n')],
is_const=True)
## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::Channel> ns3::BridgeNetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): uint32_t ns3::BridgeNetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): uint16_t ns3::BridgeNetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): uint32_t ns3::BridgeNetDevice::GetNBridgePorts() const [member function]
cls.add_method('GetNBridgePorts',
'uint32_t',
[],
is_const=True)
## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::Node> ns3::BridgeNetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): static ns3::TypeId ns3::BridgeNetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_virtual=True)
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_virtual=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_virtual=True)
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_virtual=True)
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_const=True, is_virtual=True)
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::ForwardBroadcast(ns3::Ptr<ns3::NetDevice> incomingPort, ns3::Ptr<const ns3::Packet> packet, uint16_t protocol, ns3::Mac48Address src, ns3::Mac48Address dst) [member function]
cls.add_method('ForwardBroadcast',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'incomingPort'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Mac48Address', 'src'), param('ns3::Mac48Address', 'dst')],
visibility='protected')
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::ForwardUnicast(ns3::Ptr<ns3::NetDevice> incomingPort, ns3::Ptr<const ns3::Packet> packet, uint16_t protocol, ns3::Mac48Address src, ns3::Mac48Address dst) [member function]
cls.add_method('ForwardUnicast',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'incomingPort'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Mac48Address', 'src'), param('ns3::Mac48Address', 'dst')],
visibility='protected')
## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::NetDevice> ns3::BridgeNetDevice::GetLearnedState(ns3::Mac48Address source) [member function]
cls.add_method('GetLearnedState',
'ns3::Ptr< ns3::NetDevice >',
[param('ns3::Mac48Address', 'source')],
visibility='protected')
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::Learn(ns3::Mac48Address source, ns3::Ptr<ns3::NetDevice> port) [member function]
cls.add_method('Learn',
'void',
[param('ns3::Mac48Address', 'source'), param('ns3::Ptr< ns3::NetDevice >', 'port')],
visibility='protected')
## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::ReceiveFromDevice(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<const ns3::Packet> packet, uint16_t protocol, ns3::Address const & source, ns3::Address const & destination, ns3::NetDevice::PacketType packetType) [member function]
cls.add_method('ReceiveFromDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'destination'), param('ns3::NetDevice::PacketType', 'packetType')],
visibility='protected')
return
def register_functions(root_module):
module = root_module
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
| gpl-2.0 | 9,110,006,713,897,814,000 | 64.378546 | 381 | 0.61688 | false |
SUSE/kiwi | test/unit/package_manager/init_test.py | 1 | 1854 | from mock import (
patch, Mock
)
from pytest import raises
from kiwi.package_manager import PackageManager
from kiwi.exceptions import KiwiPackageManagerSetupError
class TestPackageManager:
def test_package_manager_not_implemented(self):
with raises(KiwiPackageManagerSetupError):
PackageManager.new('repository', 'ms-manager')
@patch('kiwi.package_manager.zypper.PackageManagerZypper')
def test_manager_zypper(self, mock_manager):
repository = Mock()
PackageManager.new(repository, 'zypper')
mock_manager.assert_called_once_with(repository, None)
@patch('kiwi.package_manager.dnf.PackageManagerDnf')
def test_manager_dnf(self, mock_manager):
repository = Mock()
PackageManager.new(repository, 'dnf')
mock_manager.assert_called_once_with(repository, None)
@patch('kiwi.package_manager.dnf.PackageManagerDnf')
def test_manager_yum(self, mock_manager):
repository = Mock()
PackageManager.new(repository, 'yum')
mock_manager.assert_called_once_with(repository, None)
@patch('kiwi.package_manager.microdnf.PackageManagerMicroDnf')
def test_manager_microdnf(self, mock_manager):
repository = Mock()
PackageManager.new(repository, 'microdnf')
mock_manager.assert_called_once_with(repository, None)
@patch('kiwi.package_manager.apt.PackageManagerApt')
def test_manager_apt(self, mock_manager):
repository = Mock()
PackageManager.new(repository, 'apt-get')
mock_manager.assert_called_once_with(repository, None)
@patch('kiwi.package_manager.pacman.PackageManagerPacman')
def test_manager_pacman(self, mock_manager):
repository = Mock()
PackageManager.new(repository, 'pacman')
mock_manager.assert_called_once_with(repository, None)
| gpl-3.0 | 1,259,736,474,447,080,400 | 36.08 | 66 | 0.704423 | false |
mxrrow/zaicoin | src/deps/boost/libs/python/pyste/dist/create_build.py | 54 | 1668 | # Copyright Bruno da Silva de Oliveira 2006. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import os
import sys
import shutil
import fnmatch
from zipfile import ZipFile, ZIP_DEFLATED
def findfiles(directory, mask):
def visit(files, dir, names):
for name in names:
if fnmatch.fnmatch(name, mask):
files.append(os.path.join(dir, name))
files = []
os.path.walk(directory, visit, files)
return files
def main():
# test if PyXML is installed
try:
import _xmlplus.parsers.expat
pyxml = '--includes _xmlplus.parsers.expat'
except ImportError:
pyxml = ''
# create exe
status = os.system('python setup.py py2exe %s >& build.log' % pyxml)
if status != 0:
raise RuntimeError, 'Error creating EXE'
# create distribution
import pyste
version = pyste.__VERSION__
zip = ZipFile('pyste-%s.zip' % version, 'w', ZIP_DEFLATED)
# include the base files
dist_dir = 'dist/pyste'
for basefile in os.listdir(dist_dir):
zip.write(os.path.join(dist_dir, basefile), os.path.join('pyste', basefile))
# include documentation
for doc_file in findfiles('../doc', '*.*'):
dest_name = os.path.join('pyste/doc', doc_file[3:])
zip.write(doc_file, dest_name)
zip.write('../index.html', 'pyste/doc/index.html')
zip.close()
# cleanup
os.remove('build.log')
shutil.rmtree('build')
shutil.rmtree('dist')
if __name__ == '__main__':
sys.path.append('../src')
main()
| mit | -8,359,428,074,779,560,000 | 29.327273 | 84 | 0.615707 | false |
alibbaba/plugin.video.live.streamspro | plugin.video.live.streamspro/resources/lib/resolvers/cloudtime.py | 2 | 1502 | # -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from resources.lib.libraries import client
def resolve(url):
try:
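        # pull the video id out of either path-style (.../<id>) or
        # query-style (...v=<id>) embed URLs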
id = re.compile('//.+?/.+?/([\w]+)').findall(url)
id += re.compile('//.+?/.+?v=([\w]+)').findall(url)
id = id[0]
url = 'http://embed.cloudtime.to/embed.php?v=%s' % id
result = client.request(url)
key = re.compile('flashvars.filekey=(.+?);').findall(result)[-1]
try: key = re.compile('\s+%s="(.+?)"' % key).findall(result)[-1]
except: pass
url = 'http://www.cloudtime.to/api/player.api.php?key=%s&file=%s' % (key, id)
result = client.request(url)
url = re.compile('url=(.+?)&').findall(result)[0]
return url
except:
return
| gpl-2.0 | 6,154,396,449,308,847,000 | 29.957447 | 85 | 0.601864 | false |
solintegra/addons | email_template/__init__.py | 381 | 1144 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import email_template
import wizard
import res_partner
import ir_actions
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 5,029,898,068,977,751,000 | 41.37037 | 78 | 0.628497 | false |
AIFDR/inasafe | safe/metadata35/property/boolean_property.py | 6 | 1694 | # -*- coding: utf-8 -*-
"""
InaSAFE Disaster risk assessment tool developed by AusAid -
**metadata module.**
Contact : [email protected]
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '[email protected]'
__revision__ = '$Format:%H$'
__date__ = '08/12/15'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import json
NoneType = type(None)
from safe.common.exceptions import MetadataCastError
from safe.metadata35.property import BaseProperty
class BooleanProperty(BaseProperty):
"""A property that accepts boolean."""
# if you edit this you need to adapt accordingly xml_value and is_valid
_allowed_python_types = [bool, NoneType]
def __init__(self, name, value, xml_path):
super(BooleanProperty, self).__init__(
name, value, xml_path, self._allowed_python_types)
@classmethod
def is_valid(cls, value):
return True
def cast_from_str(self, value):
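        # booleans are serialized to XML as '0'/'1' (see xml_value below),
        # so parse the string as an integer first, then coerce to bool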
try:
return bool(int(value))
except ValueError as e:
raise MetadataCastError(e)
@property
def xml_value(self):
if self.python_type is bool:
return str(int(self.value))
elif self.python_type is NoneType:
return ''
else:
raise RuntimeError('self._allowed_python_types and self.xml_value'
'are out of sync. This should never happen')
| gpl-3.0 | 1,491,817,624,055,527,700 | 27.711864 | 78 | 0.641677 | false |
ales-erjavec/scipy | benchmarks/benchmarks/fftpack_basic.py | 46 | 2646 | """ Test functions for fftpack.basic module
"""
from __future__ import division, absolute_import, print_function
from numpy import arange, asarray, zeros, dot, exp, pi, double, cdouble
import numpy.fft
from numpy.random import rand
try:
from scipy.fftpack import ifft, fft, fftn, irfft, rfft
except ImportError:
pass
from .common import Benchmark
def random(size):
return rand(*size)
def direct_dft(x):
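    # reference O(n^2) DFT used to sanity-check the FFT routines:
    # y[i] = sum_k x[k] * exp(-2j*pi*i*k/n)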
x = asarray(x)
n = len(x)
y = zeros(n,dtype=cdouble)
w = -arange(n)*(2j*pi/n)
for i in range(n):
y[i] = dot(exp(i*w),x)
return y
def direct_idft(x):
x = asarray(x)
n = len(x)
y = zeros(n,dtype=cdouble)
w = arange(n)*(2j*pi/n)
for i in range(n):
y[i] = dot(exp(i*w),x)/n
return y
class Fft(Benchmark):
params = [
[100, 256, 512, 1000, 1024, 2048, 2048*2, 2048*4],
['real', 'cmplx'],
['scipy', 'numpy']
]
param_names = ['size', 'type', 'module']
def setup(self, size, cmplx, module):
if cmplx == 'cmplx':
self.x = random([size]).astype(cdouble)+random([size]).astype(cdouble)*1j
else:
self.x = random([size]).astype(double)
def time_fft(self, size, cmplx, module):
if module == 'numpy':
numpy.fft.fft(self.x)
else:
fft(self.x)
def time_ifft(self, size, cmplx, module):
if module == 'numpy':
numpy.fft.ifft(self.x)
else:
ifft(self.x)
class RFft(Benchmark):
params = [
[100, 256, 512, 1000, 1024, 2048, 2048*2, 2048*4],
['scipy', 'numpy']
]
param_names = ['size', 'module']
def setup(self, size, module):
self.x = random([size]).astype(double)
def time_rfft(self, size, module):
if module == 'numpy':
numpy.fft.rfft(self.x)
else:
rfft(self.x)
def time_irfft(self, size, module):
if module == 'numpy':
numpy.fft.irfft(self.x)
else:
irfft(self.x)
class Fftn(Benchmark):
params = [
["100x100", "1000x100", "256x256", "512x512"],
['real', 'cmplx'],
['scipy', 'numpy']
]
param_names = ['size', 'type', 'module']
def setup(self, size, cmplx, module):
        size = [int(s) for s in size.split("x")]  # a list, not an iterator: consumed twice in the cmplx branch
if cmplx != 'cmplx':
self.x = random(size).astype(double)
else:
self.x = random(size).astype(cdouble)+random(size).astype(cdouble)*1j
def time_fftn(self, size, cmplx, module):
if module == 'numpy':
numpy.fft.fftn(self.x)
else:
fftn(self.x)
| bsd-3-clause | -995,884,213,733,752,600 | 22.625 | 85 | 0.538171 | false |
morenopc/edx-platform | cms/djangoapps/contentstore/views/assets.py | 3 | 11648 | import logging
from functools import partial
import math
import json
from django.http import HttpResponseBadRequest
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django_future.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_POST
from django.conf import settings
from edxmako.shortcuts import render_to_response
from cache_toolbox.core import del_cached_content
from contentstore.utils import reverse_course_url
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.content import StaticContent
from xmodule.exceptions import NotFoundError
from django.core.exceptions import PermissionDenied
from xmodule.modulestore.keys import CourseKey, AssetKey
from util.date_utils import get_default_time_display
from util.json_request import JsonResponse
from django.http import HttpResponseNotFound
from django.utils.translation import ugettext as _
from pymongo import ASCENDING, DESCENDING
from .access import has_course_access
from xmodule.modulestore.exceptions import ItemNotFoundError
__all__ = ['assets_handler']
# pylint: disable=unused-argument
@login_required
@ensure_csrf_cookie
def assets_handler(request, course_key_string=None, asset_key_string=None):
"""
The restful handler for assets.
It allows retrieval of all the assets (as an HTML page), as well as uploading new assets,
deleting assets, and changing the "locked" state of an asset.
GET
html: return an html page which will show all course assets. Note that only the asset container
is returned and that the actual assets are filled in with a client-side request.
json: returns a page of assets. The following parameters are supported:
page: the desired page of results (defaults to 0)
page_size: the number of items per page (defaults to 50)
sort: the asset field to sort by (defaults to "date_added")
direction: the sort direction (defaults to "descending")
POST
json: create (or update?) an asset. The only updating that can be done is changing the lock state.
PUT
json: update the locked state of an asset
DELETE
json: delete an asset
"""
course_key = CourseKey.from_string(course_key_string)
if not has_course_access(request.user, course_key):
raise PermissionDenied()
response_format = request.REQUEST.get('format', 'html')
if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
if request.method == 'GET':
return _assets_json(request, course_key)
else:
asset_key = AssetKey.from_string(asset_key_string) if asset_key_string else None
return _update_asset(request, course_key, asset_key)
elif request.method == 'GET': # assume html
return _asset_index(request, course_key)
else:
return HttpResponseNotFound()
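# The sketch below shows how a client might page through the JSON listing
# served by assets_handler above. It is illustrative only: the host and the
# course key in the URL are hypothetical, and it assumes the third-party
# `requests` package rather than anything in this module.
#
#   import requests
#   resp = requests.get(
#       'http://studio.example.com/assets/org/course/run',
#       params={'format': 'json', 'page': 0, 'page_size': 50,
#               'sort': 'date_added', 'direction': 'desc'},
#       headers={'Accept': 'application/json'},
#   )
#   page = resp.json()
#   for asset in page['assets']:
#       print asset['display_name'], asset['url']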
def _asset_index(request, course_key):
"""
Display an editable asset library.
Supports start (0-based index into the list of assets) and max query parameters.
"""
course_module = modulestore().get_course(course_key)
return render_to_response('asset_index.html', {
'context_course': course_module,
'asset_callback_url': reverse_course_url('assets_handler', course_key)
})
def _assets_json(request, course_key):
"""
Display an editable asset library.
Supports start (0-based index into the list of assets) and max query parameters.
"""
requested_page = int(request.REQUEST.get('page', 0))
requested_page_size = int(request.REQUEST.get('page_size', 50))
requested_sort = request.REQUEST.get('sort', 'date_added')
sort_direction = DESCENDING
if request.REQUEST.get('direction', '').lower() == 'asc':
sort_direction = ASCENDING
# Convert the field name to the Mongo name
if requested_sort == 'date_added':
requested_sort = 'uploadDate'
elif requested_sort == 'display_name':
requested_sort = 'displayname'
sort = [(requested_sort, sort_direction)]
current_page = max(requested_page, 0)
start = current_page * requested_page_size
assets, total_count = _get_assets_for_page(request, course_key, current_page, requested_page_size, sort)
end = start + len(assets)
# If the query is beyond the final page, then re-query the final page so that at least one asset is returned
if requested_page > 0 and start >= total_count:
current_page = int(math.floor((total_count - 1) / requested_page_size))
start = current_page * requested_page_size
assets, total_count = _get_assets_for_page(request, course_key, current_page, requested_page_size, sort)
end = start + len(assets)
asset_json = []
for asset in assets:
asset_id = asset['_id']
asset_location = StaticContent.compute_location(course_key, asset_id['name'])
# note, due to the schema change we may not have a 'thumbnail_location' in the result set
thumbnail_location = asset.get('thumbnail_location', None)
if thumbnail_location:
thumbnail_location = course_key.make_asset_key('thumbnail', thumbnail_location[4])
asset_locked = asset.get('locked', False)
asset_json.append(_get_asset_json(asset['displayname'], asset['uploadDate'], asset_location, thumbnail_location, asset_locked))
return JsonResponse({
'start': start,
'end': end,
'page': current_page,
'pageSize': requested_page_size,
'totalCount': total_count,
'assets': asset_json,
'sort': requested_sort,
})
def _get_assets_for_page(request, course_key, current_page, page_size, sort):
"""
Returns the list of assets for the specified page and page size.
"""
start = current_page * page_size
return contentstore().get_all_content_for_course(
course_key, start=start, maxresults=page_size, sort=sort
)
@require_POST
@ensure_csrf_cookie
@login_required
def _upload_asset(request, course_key):
'''
This method allows for POST uploading of files into the course asset
library, which will be supported by GridFS in MongoDB.
'''
# Does the course actually exist?!? Get anything from it to prove its
# existence
try:
modulestore().get_course(course_key)
except ItemNotFoundError:
# no return it as a Bad Request response
logging.error("Could not find course: %s", course_key)
return HttpResponseBadRequest()
# compute a 'filename' which is similar to the location formatting, we're
# using the 'filename' nomenclature since we're using a FileSystem paradigm
# here. We're just imposing the Location string formatting expectations to
# keep things a bit more consistent
upload_file = request.FILES['file']
filename = upload_file.name
mime_type = upload_file.content_type
content_loc = StaticContent.compute_location(course_key, filename)
chunked = upload_file.multiple_chunks()
sc_partial = partial(StaticContent, content_loc, filename, mime_type)
if chunked:
content = sc_partial(upload_file.chunks())
tempfile_path = upload_file.temporary_file_path()
else:
content = sc_partial(upload_file.read())
tempfile_path = None
# first let's see if a thumbnail can be created
(thumbnail_content, thumbnail_location) = contentstore().generate_thumbnail(
content,
tempfile_path=tempfile_path
)
# delete cached thumbnail even if one couldn't be created this time (else
# the old thumbnail will continue to show)
del_cached_content(thumbnail_location)
# now store thumbnail location only if we could create it
if thumbnail_content is not None:
content.thumbnail_location = thumbnail_location
# then commit the content
contentstore().save(content)
del_cached_content(content.location)
# readback the saved content - we need the database timestamp
readback = contentstore().find(content.location)
locked = getattr(content, 'locked', False)
response_payload = {
'asset': _get_asset_json(content.name, readback.last_modified_at, content.location, content.thumbnail_location, locked),
'msg': _('Upload completed')
}
return JsonResponse(response_payload)
@require_http_methods(("DELETE", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def _update_asset(request, course_key, asset_key):
"""
restful CRUD operations for a course asset.
Currently only DELETE, POST, and PUT methods are implemented.
asset_path_encoding: the odd /c4x/org/course/category/name repr of the asset (used by Backbone as the id)
"""
if request.method == 'DELETE':
# Make sure the item to delete actually exists.
try:
content = contentstore().find(asset_key)
except NotFoundError:
return JsonResponse(status=404)
# ok, save the content into the trashcan
contentstore('trashcan').save(content)
# see if there is a thumbnail as well, if so move that as well
if content.thumbnail_location is not None:
# We are ignoring the value of the thumbnail_location-- we only care whether
# or not a thumbnail has been stored, and we can now easily create the correct path.
thumbnail_location = course_key.make_asset_key('thumbnail', asset_key.name)
try:
thumbnail_content = contentstore().find(thumbnail_location)
contentstore('trashcan').save(thumbnail_content)
# hard delete thumbnail from origin
contentstore().delete(thumbnail_content.get_id())
# remove from any caching
del_cached_content(thumbnail_location)
except:
logging.warning('Could not delete thumbnail: %s', thumbnail_location)
# delete the original
contentstore().delete(content.get_id())
# remove from cache
del_cached_content(content.location)
return JsonResponse()
elif request.method in ('PUT', 'POST'):
if 'file' in request.FILES:
return _upload_asset(request, course_key)
else:
# Update existing asset
try:
modified_asset = json.loads(request.body)
except ValueError:
return HttpResponseBadRequest()
contentstore().set_attr(asset_key, 'locked', modified_asset['locked'])
# Delete the asset from the cache so we check the lock status the next time it is requested.
del_cached_content(asset_key)
return JsonResponse(modified_asset, status=201)
def _get_asset_json(display_name, date, location, thumbnail_location, locked):
"""
Helper method for formatting the asset information to send to client.
"""
asset_url = location.to_deprecated_string()
external_url = settings.LMS_BASE + asset_url
return {
'display_name': display_name,
'date_added': get_default_time_display(date),
'url': asset_url,
'external_url': external_url,
'portable_url': StaticContent.get_static_path_from_location(location),
'thumbnail': thumbnail_location.to_deprecated_string() if thumbnail_location is not None else None,
'locked': locked,
# Needed for Backbone delete/update.
'id': unicode(location)
}
| agpl-3.0 | 7,785,474,822,139,345,000 | 38.754266 | 135 | 0.677713 | false |
seanandrews/diskpop | phot/priors.py | 1 | 1143 | #
#
#
import numpy as np
import pandas as pd
from scipy.interpolate import interp1d
import matplotlib.pyplot as plt
import sys
# effective temperature prior
# inputs
Sbar = 60.
eSbar = 1.
Tinput = 8700.
# load spectral type |-> temperature conversion file
dt = {'ST': np.str, 'STix': np.float64, 'Teff':np.float64, 'eTeff':np.float64}
a = pd.read_csv('data/adopted_spt-teff.txt', dtype=dt,
names=['ST','STix','Teff','eTeff'])
# discretized relationship
S_g = np.array(a['STix'])
T_g = np.array(a['Teff'])
eT_g = np.array(a['eTeff'])
# need to interpolate for appropriate integration
tint = interp1d(S_g, T_g)
eint = interp1d(S_g, eT_g)
S = np.linspace(np.min(S_g), np.max(S_g), num=10*len(S_g))
T = tint(S)
eT = eint(S)
# calculate p(S)
p_S = np.exp(-0.5*((S-Sbar)/eSbar )**2) / (np.sqrt(2.*np.pi)*eSbar)
# now calculate p(T)
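# p(T) marginalizes the spectral-type prior over the (uncertain) ST -> Teff
# relation:  p(T) = integral p(T|S) p(S) dS,  where p(T|S) is a Gaussian
# centered on tint(S) with width eint(S)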
p_T = np.zeros_like(T)
for i in np.arange(len(T)):
p_TS = np.exp(-0.5*((T[i]-tint(S))/eint(S))**2) / \
(np.sqrt(2.*np.pi)*eint(S))
p_T[i] = np.trapz(p_TS*p_S, S)
# create an interpolator for p_T
p_tint = interp1d(T, p_T)
prior_T = p_tint(Tinput)
print(prior_T)
| mit | -8,302,033,829,685,894,000 | 21.86 | 78 | 0.620297 | false |
liama482/Picture | ggame/sysdeps.py | 227 | 1916 | def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('browser') and module_exists('javascript'):
from browser import window, document
from javascript import JSObject, JSConstructor
GFX = JSObject(window.PIXI)
GFX_Rectangle = JSConstructor(GFX.Rectangle)
GFX_Texture = JSConstructor(GFX.Texture)
GFX_Texture_fromImage = JSConstructor(GFX.Texture.fromImage)
GFX_Sprite = JSConstructor(GFX.Sprite)
GFX_Graphics = JSConstructor(GFX.Graphics)()
GFX_Text = JSConstructor(GFX.Text)
GFX_DetectRenderer = GFX.autoDetectRenderer
SND = JSObject(window.buzz)
SND_Sound = JSConstructor(SND.sound)
class GFX_Window(object):
def __init__(self, width, height, onclose):
self._w = window.open("", "")
self._stage = JSConstructor(GFX.Container)()
self.width = width if width != 0 else int(window.innerWidth * 0.9)
self.height = height if height != 0 else int(window.innerHeight * 0.9)
self._renderer = GFX.autoDetectRenderer(self.width, self.height, {'transparent':True})
self._w.document.body.appendChild(self._renderer.view)
self._w.onunload = onclose
def bind(self, evtspec, callback):
self._w.document.body.bind(evtspec, callback)
def add(self, obj):
self._stage.addChild(obj)
def remove(self, obj):
self._stage.removeChild(obj)
def animate(self, stepcallback):
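            # render the current stage, then schedule stepcallback for the
            # next frame via the browser's requestAnimationFrame loop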
self._renderer.render(self._stage)
self._w.requestAnimationFrame(stepcallback)
def destroy(self):
SND.all().stop()
self._stage.destroy()
elif module_exists('pygame'):
try:
from ggame.pygamedeps import *
except:
from pygamedeps import *
else:
try:
from ggame.headlessdeps import *
except:
from headlessdeps import *
| mit | -2,017,000,670,574,898,200 | 27.597015 | 92 | 0.656054 | false |
youprofit/NewsBlur | utils/munin/newsblur_feed_counts.py | 10 | 2628 | #!/usr/bin/env python
from utils.munin.base import MuninGraph
import redis
class NBMuninGraph(MuninGraph):
@property
def graph_config(self):
return {
'graph_category' : 'NewsBlur',
'graph_title' : 'NewsBlur Feed Counts',
'graph_vlabel' : 'Feeds Feed Counts',
'graph_args' : '-l 0',
'scheduled_feeds.label': 'scheduled_feeds',
'exception_feeds.label': 'exception_feeds',
'exception_pages.label': 'exception_pages',
'duplicate_feeds.label': 'duplicate_feeds',
'active_feeds.label': 'active_feeds',
'push_feeds.label': 'push_feeds',
}
def calculate_metrics(self):
from apps.rss_feeds.models import Feed, DuplicateFeed
from apps.push.models import PushSubscription
from django.conf import settings
from apps.statistics.models import MStatistics
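        # each count below is expensive to compute, so results are cached
        # via MStatistics for 12 hours (60*60*12) and recomputed on a miss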
exception_feeds = MStatistics.get('munin:exception_feeds')
if not exception_feeds:
exception_feeds = Feed.objects.filter(has_feed_exception=True).count()
MStatistics.set('munin:exception_feeds', exception_feeds, 60*60*12)
exception_pages = MStatistics.get('munin:exception_pages')
if not exception_pages:
exception_pages = Feed.objects.filter(has_page_exception=True).count()
MStatistics.set('munin:exception_pages', exception_pages, 60*60*12)
duplicate_feeds = MStatistics.get('munin:duplicate_feeds')
if not duplicate_feeds:
duplicate_feeds = DuplicateFeed.objects.count()
MStatistics.set('munin:duplicate_feeds', duplicate_feeds, 60*60*12)
active_feeds = MStatistics.get('munin:active_feeds')
if not active_feeds:
active_feeds = Feed.objects.filter(active_subscribers__gt=0).count()
MStatistics.set('munin:active_feeds', active_feeds, 60*60*12)
push_feeds = MStatistics.get('munin:push_feeds')
if not push_feeds:
push_feeds = PushSubscription.objects.filter(verified=True).count()
MStatistics.set('munin:push_feeds', push_feeds, 60*60*12)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
return {
'scheduled_feeds': r.zcard('scheduled_updates'),
'exception_feeds': exception_feeds,
'exception_pages': exception_pages,
'duplicate_feeds': duplicate_feeds,
'active_feeds': active_feeds,
'push_feeds': push_feeds,
}
if __name__ == '__main__':
NBMuninGraph().run()
| mit | 7,878,823,341,885,764,000 | 39.430769 | 82 | 0.616819 | false |
gabrielfalcao/lettuce | tests/integration/lib/Django-1.3/django/contrib/flatpages/admin.py | 250 | 1089 | from django import forms
from django.contrib import admin
from django.contrib.flatpages.models import FlatPage
from django.utils.translation import ugettext_lazy as _
class FlatpageForm(forms.ModelForm):
url = forms.RegexField(label=_("URL"), max_length=100, regex=r'^[-\w/\.~]+$',
help_text = _("Example: '/about/contact/'. Make sure to have leading"
" and trailing slashes."),
error_message = _("This value must contain only letters, numbers,"
" dots, underscores, dashes, slashes or tildes."))
class Meta:
model = FlatPage
class FlatPageAdmin(admin.ModelAdmin):
form = FlatpageForm
fieldsets = (
(None, {'fields': ('url', 'title', 'content', 'sites')}),
(_('Advanced options'), {'classes': ('collapse',), 'fields': ('enable_comments', 'registration_required', 'template_name')}),
)
list_display = ('url', 'title')
list_filter = ('sites', 'enable_comments', 'registration_required')
search_fields = ('url', 'title')
admin.site.register(FlatPage, FlatPageAdmin)
| gpl-3.0 | 8,087,098,481,861,207,000 | 37.892857 | 133 | 0.630854 | false |
lcgong/alchemy | redbean/test/security/serv/secure.py | 2 | 2092 | import logging
logger = logging.getLogger(__name__)
from redbean.secure.identity import SessionIdentity
from redbean.secure.keeper import UserIdentityKeeper
from redbean.asyncid import AsyncID64
from test.security.app import rest, etcd_endpoint
user_id_generator = AsyncID64('/asyncid/user_sn', etcd_endpoint)
keeper = UserIdentityKeeper(etcd_endpoint, user_id_generator=user_id_generator)
# rest.
rest.set_path('.')
@rest.post('login')
@rest.prepare_session
async def login(json_body: dict) -> SessionIdentity:
client_id = json_body.get('client_id')
identity = json_body.get('identity')
passwd = json_body.get('passwd')
identity = await keeper.check_passwd(identity, passwd)
identity.client_id = client_id
return identity
@rest.post('logout')
@rest.close_session
async def logout(identity: SessionIdentity) -> None:
logger.debug(f'signout {identity}')
@rest.post('identity/new')
@rest.prepare_session
async def create_identity(json_body: dict) -> SessionIdentity:
login_id = json_body.get('identity')
passwd = json_body.get('passwd')
identity = await keeper.create_identity(login_id, passwd)
return identity
@rest.permission_verifier
async def verify_permissions(identity: SessionIdentity, permissions):
return await keeper.verify_permissions(identity.user_id, *permissions)
@rest.on_cleanup
async def cleanup():
user_id_generator.stop()
await user_id_generator.stopped()
# @rest.get('verify_email/{token}')
# @rest.prepare_session
# async def verify_email(token: str) -> SessionIdentity:
# """ 使用邮件确认链接确认其使用本人邮件地址作为登录标识 """
# assert token
# identity = await keeper.verify_email(token)
# return identity
# @rest.post('signup')
# async def signup(json_arg: dict) -> SessionIdentity:
# client_id = json_arg.get('client_id')
# identity = json_arg.get('login_id')
# passwd = json_arg.get('login_id')
# assert client_id
# assert identity
# assert passwd
# await keeper.create_email_identity(client_id, identity, passwd)
| gpl-3.0 | -4,719,006,951,941,784,000 | 24.848101 | 79 | 0.715475 | false |
Panos512/invenio | modules/bibauthorid/lib/bibauthorid_regression_tests.py | 5 | 8601 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibAuthorId regressions tests."""
__revision__ = "$Id$"
from invenio.testutils import InvenioTestCase, \
run_test_suite, make_test_suite, test_web_page_content
from invenio.config import CFG_SITE_URL, \
CFG_INSPIRE_SITE, CFG_BIBAUTHORID_ENABLED
from invenio.dbquery import run_sql
import random
import string
class BibAuthorIdDisplayedPages(InvenioTestCase):
"""This regression test checks whether suitable pages are displayed
based on the variables CFG_INSPIRE_SITE and CFG_BIBAUTHORID_ENABLED"""
def setUp(self):
""" Initialization before tests"""
# This random, arbitrarily large string is obviously invalid.
self.any_name = ''.join(random.choice(string.lowercase) for x in range(26))
self.canonical_name = self._get_canonical_name()
def test_content_of_manage_profile(self):
"""This test checks whether the 'manage profile' page
is neutral of implementation (e.g. Inspire features) and
there are no authorisation issues."""
if CFG_INSPIRE_SITE or CFG_BIBAUTHORID_ENABLED:
# Ensures the authorization issue for manage_profile
# will not return.
url = '%s/author/manage_profile/%s' % (CFG_SITE_URL,
self.canonical_name)
text_not_there = 'This page is not accessible directly.'
response = test_web_page_content(url, 'guest',
unexpected_text=text_not_there)
self.assertEqual(list(), response)
# Ensures that the js (INSPIRE specific) login prompt box appears
# Only for Inspire
if self.canonical_name:
url = '%s/author/claim/%s' % (CFG_SITE_URL,
self.canonical_name)
guest_prompt_value = 'false'
if CFG_INSPIRE_SITE:
guest_prompt_value = 'true'
text_to_check = 'guestPrompt: %s' % guest_prompt_value
response = test_web_page_content(url, 'guest',
expected_text=text_to_check)
self.assertEqual(list(), response)
def test_content_of_profile_pages(self):
"""This test checks whether the profiles are displayed
containing appropriate error messages and content
and redirect to other appropriate."""
# If we're on Inspire, BibAuthorId is always enabled.
if CFG_INSPIRE_SITE or CFG_BIBAUTHORID_ENABLED:
# A valid canonical name should lead to the author's profile page.
if self.canonical_name:
url = '%s/author/profile/%s' % (CFG_SITE_URL,
self.canonical_name)
text_to_check = 'Personal Information'
response = test_web_page_content(url, 'guest',
expected_text=text_to_check)
self.assertEqual(list(), response)
# An invalid query for some profile, should lead to 'Person search'.
url = '%s/author/profile/%s' % (CFG_SITE_URL, self.any_name)
text_to_check = ['Person search',
'We do not have a publication list for \'%s\'.'
% self.any_name]
response = test_web_page_content(url, 'guest',
expected_text=text_to_check)
self.assertEqual(list(), response)
# author/%s searches are kept for backward compatibility.
# Should theses pages become obsolete,
# the regression test will not fail.
if self._test_web_page_existence_no_robots('%s/author/%s'
% (CFG_SITE_URL,
self.canonical_name)):
if self.canonical_name:
url = '%s/author/%s' % (CFG_SITE_URL,
self.canonical_name)
text_to_check = 'Personal Information'
response = test_web_page_content(url, 'guest',
expected_text=text_to_check)
self.assertEqual(list(), response)
url = '%s/author/%s' % (CFG_SITE_URL, self.any_name)
text_to_check = ['Person search',
'We do not have a publication list for \'%s\''
% self.any_name]
response = test_web_page_content(url, 'guest',
expected_text=text_to_check)
self.assertEqual(list(), response)
# Bibauthorid is disabled.
else:
# The navigation bar shouldn't be there.
text_not_there = ['View Profile', 'Manage Profile']
url = '%s/author/profile/Ellis,%%20J' % CFG_SITE_URL
text_to_check = ['Ellis, J', 'Personal Information']
response = test_web_page_content(url, 'guest',
expected_text=text_to_check,
unexpected_text=text_not_there)
self.assertEqual(list(), response)
# An invalid query for a profile, should lead to 'Person search'.
url = '%s/author/profile/%s' % (CFG_SITE_URL, self.any_name)
text_to_check = 'This doesn\'t look like a person ID!'
response = test_web_page_content(url, 'guest',
expected_text=text_to_check,
unexpected_text=text_not_there)
self.assertEqual(list(), response)
if self._test_web_page_existence_no_robots('%s/author/Ellis, J'
% CFG_SITE_URL):
url = '%s/author/Ellis,%%20J' % CFG_SITE_URL
text_to_check = ['Ellis, J', 'Personal Information']
response = test_web_page_content(url, 'guest',
expected_text=text_to_check,
unexpected_text=text_not_there)
self.assertEqual(list(), response)
url = '%s/author/%s' % (CFG_SITE_URL, self.any_name)
text_to_check = 'This doesn\'t look like a person ID!'
response = test_web_page_content(url, 'guest',
expected_text=text_to_check,
unexpected_text=text_not_there)
self.assertEqual(list(), response)
def _test_web_page_existence_no_robots(self, url):
"""Almost identical to testutils.test_web_page_existence(url) except
that we need to ignore robots.txt in some cases
(e.g. Invenio production) for this regression test."""
import mechanize
browser = mechanize.Browser()
try:
browser.set_handle_robots(False) # ignore robots.txt.
browser.open(url)
except:
raise
return True
def _get_canonical_name(self):
""" Fetches a valid canonical name from the database.
Returns None if it is empty."""
result = run_sql("select data from aidPERSONIDDATA where tag ="
+ "'canonical_name' LIMIT 1")
if result:
return result[0][0]
TEST_SUITE = make_test_suite(BibAuthorIdDisplayedPages)
if __name__ == "__main__":
run_test_suite(TEST_SUITE, warn_user=False)
| gpl-2.0 | 436,882,294,439,939,140 | 44.994652 | 83 | 0.537496 | false |
PyroShark/namebench | libnamebench/nameserver_test.py | 175 | 7015 | #!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mocks for tests."""
__author__ = '[email protected] (Thomas Stromberg)'
import mocks
import nameserver
import unittest
class TestNameserver(unittest.TestCase):
def testInit(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
self.assertEquals(ns.ip, mocks.GOOD_IP)
self.assertEquals(ns.name, None)
ns = mocks.MockNameServer(mocks.NO_RESPONSE_IP, name='Broked')
self.assertEquals(ns.ip, mocks.NO_RESPONSE_IP)
self.assertEquals(ns.name, 'Broked')
def testTimedRequest(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
(response, duration, exception) = ns.TimedRequest('A', 'www.paypal.com')
self.assertEquals(response.id, 999)
expected = ('www.paypal.com. 159 IN A 66.211.169.65\n'
'www.paypal.com. 159 IN A 66.211.169.2')
self.assertEquals(str(response.answer[0]), expected)
self.assertTrue(duration > 0)
self.assertEquals(exception, None)
def testTestAnswers(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
(is_broken, warning, duration) = ns.TestAnswers('A', 'www.paypal.com',
'10.0.0.1')
self.assertEquals(is_broken, False)
self.assertEquals(warning, None)
self.assertTrue(duration > 0 and duration < 3600)
def testResponseToAscii(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
(response, duration, exception) = ns.TimedRequest('A', 'www.paypal.com')
self.assertEquals(nameserver.ResponseToAscii(response),
'66.211.169.65 + 66.211.169.2')
response.answer = None
self.assertEquals(nameserver.ResponseToAscii(response), 'no answer')
def testGoogleComResponse(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
(is_broken, warning, duration) = ns.TestGoogleComResponse()
self.assertEquals(is_broken, False)
self.assertEquals(warning,
'google.com. is hijacked (66.211.169.65 + 66.211.169.2)')
self.assertTrue(duration > 0 and duration < 3600)
def testWwwGoogleComResponse(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
(is_broken, warning, duration) = ns.TestWwwGoogleComResponse()
self.assertEquals(is_broken, True)
self.assertEquals(warning, 'No answer')
self.assertTrue(duration > 0 and duration < 3600)
def testWwwPaypalComResponse(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
(is_broken, warning, duration) = ns.TestWwwPaypalComResponse()
self.assertEquals(is_broken, False)
self.assertEquals(warning, None)
def testNegativeResponse(self):
ns = mocks.MockNameServer(mocks.NO_RESPONSE_IP)
(is_broken, warning, duration) = ns.TestNegativeResponse()
self.assertEquals(is_broken, False)
self.assertEquals(warning, None)
def testNegativeResponseHijacked(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
(is_broken, warning, duration) = ns.TestNegativeResponse()
self.assertEquals(is_broken, False)
self.assertEquals(warning,
'NXDOMAIN Hijacking (66.211.169.65 + 66.211.169.2)')
def testNegativeResponseBroken(self):
ns = mocks.MockNameServer(mocks.BROKEN_IP)
(is_broken, warning, duration) = ns.TestNegativeResponse()
self.assertEquals(is_broken, True)
self.assertEquals(warning, 'BadResponse')
def testWildcardCache(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
(response, is_broken, warning, duration) = ns.QueryWildcardCache()
self.assertEquals(is_broken, False)
question = str(response.question[0])
self.assertTrue(question.startswith('namebench'))
self.assertEquals(warning, None)
def testCheckHealthGood(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
ns.CheckHealth()
self.assertEquals(ns.CheckHealth(), False)
self.assertEquals(ns.warnings, ['No answer'])
self.assertEquals(len(ns.checks), 1)
self.assertEquals(ns.failure[0], 'TestWwwGoogleComResponse')
self.assertEquals(ns.checks[0][0:3],
('TestWwwGoogleComResponse', True, 'No answer'))
def testCheckHealthPerfect(self):
ns = mocks.MockNameServer(mocks.PERFECT_IP)
ns.CheckHealth()
self.assertEquals(ns.CheckHealth(), True)
expected = ['www.google.com. is hijacked (66.211.169.65 + 66.211.169.2)',
'google.com. is hijacked (66.211.169.65 + 66.211.169.2)',
'NXDOMAIN Hijacking (66.211.169.65 + 66.211.169.2)']
self.assertEquals(ns.warnings, expected)
self.assertEquals(len(ns.checks), 5)
self.assertEquals(ns.failure, None)
self.assertTrue(ns.check_duration > 10)
  def testQueryWildcardCacheSaving(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
other_ns = mocks.MockNameServer(mocks.PERFECT_IP)
ns.QueryWildcardCache(save=True)
other_ns.QueryWildcardCache(save=True)
# Test our cache-sharing mechanisms
(hostname, ttl) = ns.cache_check
self.assertTrue(hostname.startswith('namebench'))
self.assertEquals(ttl, 159)
(other_hostname, other_ttl) = other_ns.cache_check
self.assertTrue(other_hostname.startswith('namebench'))
self.assertNotEqual(hostname, other_hostname)
self.assertEquals(other_ttl, 159)
def testSharedCacheNoMatch(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
other_ns = mocks.MockNameServer(mocks.PERFECT_IP)
ns.QueryWildcardCache(save=True)
other_ns.QueryWildcardCache(save=True)
(shared, slower, faster) = ns.TestSharedCache(other_ns)
self.assertEquals(shared, False)
self.assertEquals(slower, None)
self.assertEquals(faster, None)
def testSharedCacheMatch(self):
ns = mocks.MockNameServer(mocks.GOOD_IP)
other_ns = mocks.MockNameServer(mocks.PERFECT_IP)
ns.QueryWildcardCache(save=True)
other_ns.QueryWildcardCache(save=True)
# Increase the TTL of 'other'
other_ns.cache_check = (other_ns.cache_check[0], other_ns.cache_check[1] + 5)
(shared, slower, faster) = ns.TestSharedCache(other_ns)
self.assertEquals(shared, True)
self.assertEquals(slower.ip, mocks.GOOD_IP)
self.assertEquals(faster.ip, mocks.PERFECT_IP)
# Increase the TTL of 'other' by a whole lot
other_ns.cache_check = (other_ns.cache_check[0], other_ns.cache_check[1] + 3600)
(shared, slower, faster) = ns.TestSharedCache(other_ns)
self.assertEquals(shared, False)
self.assertEquals(slower, None)
self.assertEquals(faster, None)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 3,601,934,114,177,076,700 | 38.189944 | 84 | 0.699501 | false |
soldag/home-assistant | tests/components/rfxtrx/test_light.py | 14 | 6854 | """The tests for the Rfxtrx light platform."""
from unittest.mock import call
import pytest
from homeassistant.components.light import ATTR_BRIGHTNESS
from homeassistant.components.rfxtrx import DOMAIN
from homeassistant.core import State
from tests.common import MockConfigEntry, mock_restore_cache
from tests.components.rfxtrx.conftest import create_rfx_test_cfg
async def test_one_light(hass, rfxtrx):
"""Test with 1 light."""
entry_data = create_rfx_test_cfg(
devices={"0b1100cd0213c7f210020f51": {"signal_repetitions": 1}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("light.ac_213c7f2_16")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 213c7f2:16"
await hass.services.async_call(
"light", "turn_on", {"entity_id": "light.ac_213c7f2_16"}, blocking=True
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "on"
assert state.attributes.get("brightness") == 255
await hass.services.async_call(
"light", "turn_off", {"entity_id": "light.ac_213c7f2_16"}, blocking=True
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "off"
assert state.attributes.get("brightness") is None
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.ac_213c7f2_16", "brightness": 100},
blocking=True,
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "on"
assert state.attributes.get("brightness") == 100
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.ac_213c7f2_16", "brightness": 10},
blocking=True,
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "on"
assert state.attributes.get("brightness") == 10
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.ac_213c7f2_16", "brightness": 255},
blocking=True,
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "on"
assert state.attributes.get("brightness") == 255
await hass.services.async_call(
"light", "turn_off", {"entity_id": "light.ac_213c7f2_16"}, blocking=True
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "off"
assert state.attributes.get("brightness") is None
assert rfxtrx.transport.send.mock_calls == [
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x01\x00\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x00\x00\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x02\x06\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x02\x00\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x02\x0f\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x00\x00\x00")),
]
@pytest.mark.parametrize("state,brightness", [["on", 100], ["on", 50], ["off", None]])
async def test_state_restore(hass, rfxtrx, state, brightness):
"""State restoration."""
entity_id = "light.ac_213c7f2_16"
mock_restore_cache(
hass, [State(entity_id, state, attributes={ATTR_BRIGHTNESS: brightness})]
)
entry_data = create_rfx_test_cfg(
devices={"0b1100cd0213c7f210020f51": {"signal_repetitions": 1}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == state
assert hass.states.get(entity_id).attributes.get(ATTR_BRIGHTNESS) == brightness
async def test_several_lights(hass, rfxtrx):
"""Test with 3 lights."""
entry_data = create_rfx_test_cfg(
devices={
"0b1100cd0213c7f230020f71": {"signal_repetitions": 1},
"0b1100100118cdea02020f70": {"signal_repetitions": 1},
"0b1100101118cdea02050f70": {"signal_repetitions": 1},
}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
await hass.async_start()
state = hass.states.get("light.ac_213c7f2_48")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 213c7f2:48"
state = hass.states.get("light.ac_118cdea_2")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 118cdea:2"
state = hass.states.get("light.ac_1118cdea_2")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 1118cdea:2"
await rfxtrx.signal("0b1100cd0213c7f230010f71")
state = hass.states.get("light.ac_213c7f2_48")
assert state
assert state.state == "on"
await rfxtrx.signal("0b1100cd0213c7f230000f71")
state = hass.states.get("light.ac_213c7f2_48")
assert state
assert state.state == "off"
await rfxtrx.signal("0b1100cd0213c7f230020f71")
state = hass.states.get("light.ac_213c7f2_48")
assert state
assert state.state == "on"
assert state.attributes.get("brightness") == 255
@pytest.mark.parametrize("repetitions", [1, 3])
async def test_repetitions(hass, rfxtrx, repetitions):
"""Test signal repetitions."""
entry_data = create_rfx_test_cfg(
devices={"0b1100cd0213c7f230020f71": {"signal_repetitions": repetitions}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
"light", "turn_on", {"entity_id": "light.ac_213c7f2_48"}, blocking=True
)
await hass.async_block_till_done()
assert rfxtrx.transport.send.call_count == repetitions
async def test_discover_light(hass, rfxtrx_automatic):
"""Test with discovery of lights."""
rfxtrx = rfxtrx_automatic
await rfxtrx.signal("0b11009e00e6116202020070")
state = hass.states.get("light.ac_0e61162_2")
assert state
assert state.state == "on"
assert state.attributes.get("friendly_name") == "AC 0e61162:2"
await rfxtrx.signal("0b1100120118cdea02020070")
state = hass.states.get("light.ac_118cdea_2")
assert state
assert state.state == "on"
assert state.attributes.get("friendly_name") == "AC 118cdea:2"
| apache-2.0 | -4,125,882,202,051,886,000 | 33.27 | 86 | 0.662533 | false |
KenKundert/quantiphy | tests/test_unit_conversion.py | 1 | 13490 | # encoding: utf8
from quantiphy import (
Quantity, UnitConversion,
QuantiPhyError, IncompatibleUnits, UnknownPreference, UnknownConversion,
UnknownUnitSystem, InvalidRecognizer, UnknownFormatKey, UnknownScaleFactor,
InvalidNumber, ExpectedQuantity, MissingName,
)
Quantity.reset_prefs()
import math
import pytest
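# The temperature and distance conversions exercised below ship with
# QuantiPhy. Additional pairs can be registered with UnitConversion; a
# minimal sketch (not used by these tests; parsec factor approximate):
#
#     UnitConversion('m', 'pc parsec', 3.0857e16)
#     # after this, Quantity('1 pc', scale='m') renders as 30.857e15 m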
def test_simple_scaling():
Quantity.reset_prefs()
with Quantity.prefs(
spacer=None, show_label=None, label_fmt=None, label_fmt_full=None
):
q=Quantity('1kg', scale=2)
qs=Quantity('2ms')
assert q.render() == '2 kg'
assert qs.render() == '2 ms'
assert q.render(scale=0.001) == '2 g'
assert str(q.scale(0.001)) == '2 g'
assert q.render(scale=qs) == '4 g'
assert str(q.scale(qs)) == '4 g'
with pytest.raises(KeyError) as exception:
q.render(scale='fuzz')
assert str(exception.value) == "unable to convert between 'fuzz' and 'g'."
assert isinstance(exception.value, UnknownConversion)
assert isinstance(exception.value, QuantiPhyError)
assert isinstance(exception.value, KeyError)
assert exception.value.args == ('fuzz', 'g')
with pytest.raises(KeyError) as exception:
q.scale('fuzz')
assert str(exception.value) == "unable to convert between 'fuzz' and 'g'."
assert isinstance(exception.value, UnknownConversion)
assert isinstance(exception.value, QuantiPhyError)
assert isinstance(exception.value, KeyError)
assert exception.value.args == ('fuzz', 'g')
q=Quantity('1', units='g', scale=1000)
assert q.render() == '1 kg'
assert q.render(scale=(0.0022046, 'lbs')) == '2.2046 lbs'
assert str(q.scale((0.0022046, 'lbs'))) == '2.2046 lbs'
q=Quantity('1', units='g', scale=qs)
assert q.render() == '2 mg'
q=Quantity('1', scale=(1000, 'g'))
assert q.render() == '1 kg'
assert q.render(scale=lambda v, u: (0.0022046*v, 'lbs')) == '2.2046 lbs'
def dB(v, u):
return 20*math.log(v, 10), 'dB'+u
def adB(v, u):
return pow(10, v/20), u[2:] if u.startswith('dB') else u
q=Quantity('-40 dBV', scale=adB)
assert q.render() == '10 mV'
assert q.render(scale=dB) == '-40 dBV'
assert str(q.scale(dB)) == '-40 dBV'
def test_temperature():
Quantity.reset_prefs()
with Quantity.prefs(
spacer=None, show_label=None, label_fmt=None, label_fmt_full=None,
ignore_sf=True
):
q=Quantity('100 °C')
assert q.render() == '100 °C'
assert q.render(scale='C') == '100 C'
assert q.render(scale='°C') == '100 °C'
assert q.render(scale='K') == '373.15 K'
assert q.render(scale='°F') == '212 °F'
assert q.render(scale='F') == '212 F'
assert q.render(scale='°R') == '671.67 °R'
assert q.render(scale='R') == '671.67 R'
q=Quantity('100 C')
assert q.render() == '100 C'
assert q.render(scale='C') == '100 C'
assert q.render(scale='K') == '373.15 K'
assert q.render(scale='F') == '212 F'
assert q.render(scale='R') == '671.67 R'
assert q.render(scale='°C') == '100 °C'
assert q.render(scale='°F') == '212 °F'
assert q.render(scale='°R') == '671.67 °R'
q=Quantity('373.15 K')
assert q.render() == '373.15 K'
assert q.render(scale='C') == '100 C'
assert q.render(scale='K') == '373.15 K'
assert q.render(scale='F') == '212 F'
assert q.render(scale='R') == '671.67 R'
assert q.render(scale='°C') == '100 °C'
assert q.render(scale='°F') == '212 °F'
assert q.render(scale='°R') == '671.67 °R'
q=Quantity('212 °F')
assert q.render() == '212 °F'
assert q.render(scale='°C') == '100 °C'
assert q.render(scale='C') == '100 C'
assert q.render(scale='K') == '373.15 K'
assert q.render(scale='°F') == '212 °F'
assert q.render(scale='F') == '212 F'
#assert q.render(scale='°R') == '671.67 °R'
#assert q.render(scale='R') == '671.67 R'
q=Quantity('212 F')
assert q.render() == '212 F'
assert q.render(scale='C') == '100 C'
assert q.render(scale='K') == '373.15 K'
assert q.render(scale='°C') == '100 °C'
assert q.render(scale='°F') == '212 °F'
assert q.render(scale='F') == '212 F'
#assert q.render(scale='°R') == '671.67 °R'
#assert q.render(scale='R') == '671.67 R'
q=Quantity('100 °C', scale='K')
assert q.render() == '373.15 K'
q=Quantity('212 °F', scale='K')
assert q.render() == '373.15 K'
q=Quantity('212 °F', scale='C')
assert q.render() == '100 C'
q=Quantity('212 F', scale='°C')
assert q.render() == '100 °C'
q=Quantity('491.67 R', scale='°C')
assert q.is_close(Quantity('0 °C'))
q=Quantity('491.67 R', scale='K')
assert q.render() == '273.15 K'
def test_distance():
Quantity.reset_prefs()
with Quantity.prefs(
spacer=None, show_label=None, label_fmt=None, label_fmt_full=None,
ignore_sf=False
):
q=Quantity('1_m')
assert q.render() == '1 m'
assert q.render(scale='cm', form='eng') == '100 cm'
assert q.render(scale='mm', form='eng') == '1e3 mm'
assert q.render(scale='um', form='eng') == '1e6 um'
assert q.render(scale='μm', form='eng') == '1e6 μm'
assert q.render(scale='nm', form='eng') == '1e9 nm'
assert q.render(scale='Å', form='eng') == '10e9 Å'
assert q.render(scale='angstrom', form='eng') == '10e9 angstrom'
assert q.render(scale='mi') == '621.37 umi'
assert q.render(scale='mile') == '621.37 umile'
assert q.render(scale='miles') == '621.37 umiles'
assert q.render(scale='in') == '39.37 in'
assert q.render(scale='inch') == '39.37 inch'
assert q.render(scale='inches') == '39.37 inches'
q=Quantity('1_m')
assert q.render() == '1 m'
q=Quantity('100cm', scale='m')
assert q.render() == '1 m'
q=Quantity('1cm', scale='m')
assert q.render() == '10 mm'
q=Quantity('1000mm', scale='m')
assert q.render() == '1 m'
q=Quantity('1mm', scale='m')
assert q.render() == '1 mm'
q=Quantity('1000000um', scale='m')
assert q.render() == '1 m'
q=Quantity('1um', scale='m')
assert q.render() == '1 um'
q=Quantity('1000000μm', scale='m')
assert q.render() == '1 m'
q=Quantity('1μm', scale='m')
assert q.render() == '1 um'
q=Quantity('1000000000nm', scale='m')
assert q.render() == '1 m'
q=Quantity('1nm', scale='m')
assert q.render() == '1 nm'
q=Quantity('10000000000Å', scale='m')
assert q.render() == '1 m'
q=Quantity('1Å', scale='m')
assert q.render() == '100 pm'
q=Quantity('1_mi', scale='m')
assert q.render() == '1.6093 km'
q=Quantity('1_mile', scale='m')
assert q.render() == '1.6093 km'
q=Quantity('1_miles', scale='m')
assert q.render() == '1.6093 km'
q=Quantity('d = 93 Mmiles -- average distance from Sun to Earth', scale='m')
assert q.render() == '149.67 Gm'
def test_mass():
Quantity.reset_prefs()
with Quantity.prefs(
spacer=None, show_label=None, label_fmt=None, label_fmt_full=None,
ignore_sf=False
):
q=Quantity('1 g')
assert q.render() == '1 g'
assert q.render(scale='oz') == '35.274 moz'
assert q.render(scale='lb') == '2.2046 mlb'
assert q.render(scale='lbs') == '2.2046 mlbs'
q=Quantity('1 oz', scale='g')
assert q.render() == '28.35 g'
q=Quantity('1 lb', scale='g')
assert q.render() == '453.59 g'
q=Quantity('1 lbs', scale='g')
assert q.render() == '453.59 g'
def test_time():
Quantity.reset_prefs()
with Quantity.prefs(
spacer=None, show_label=None, label_fmt=None, label_fmt_full=None,
ignore_sf=True
):
q=Quantity('86400 s')
assert q.render() == '86.4 ks'
assert q.render(scale='sec') == '86.4 ksec'
assert q.render(scale='min') == '1.44 kmin'
assert q.render(scale='hr') == '24 hr'
assert q.render(scale='hour') == '24 hour'
assert q.render(scale='day') == '1 day'
q=Quantity('1 day', scale='s')
assert q.render() == '86.4 ks'
q=Quantity('24 hour', scale='s')
assert q.render() == '86.4 ks'
q=Quantity('24 hr', scale='s')
assert q.render() == '86.4 ks'
q=Quantity('60 min', scale='s')
assert q.render() == '3.6 ks'
q=Quantity('60 sec', scale='s')
assert q.render() == '60 s'
def test_scale():
Quantity.reset_prefs()
secs = Quantity('86400 s')
days = secs.scale('day')
assert secs.render() == '86.4 ks'
assert days.render() == '1 day'
def test_add():
Quantity.reset_prefs()
total = Quantity(0, '$')
for contribution in [1.23, 4.56, 7.89]:
total = total.add(contribution)
assert total.render() == '$13.68'
for contribution in [1.23, 4.56, 8.89]:
total = total.add(contribution, check_units=True)
assert total.render() == '$28.36'
for contribution in [1.23, 4.56, 9.89]:
total = total.add(Quantity(contribution, '$'), check_units=True)
assert total.render() == '$44.04'
try:
total = total.add(Quantity(contribution, 'lbs'), check_units=True)
assert False
except TypeError:
assert True
def test_conversion():
Quantity.reset_prefs()
conversion = UnitConversion('USD', 'BTC', 100000)
assert str(conversion) == 'USD = 100000*BTC'
result = conversion.convert(1, 'BTC', 'USD')
assert str(result) == '100 kUSD'
result = conversion.convert(1, 'USD', 'BTC')
assert str(result) == '10 uBTC'
result = conversion.convert(from_units='BTC', to_units='USD')
assert str(result) == '100 kUSD'
result = conversion.convert(from_units='USD', to_units='BTC')
assert str(result) == '10 uBTC'
result = conversion.convert('BTC')
assert str(result) == '100 kUSD'
result = conversion.convert('USD')
assert str(result) == '10 uBTC'
result = conversion.convert(10)
assert str(result) == '1 MUSD'
dollar = Quantity('200000 USD')
bitcoin = conversion.convert(dollar)
assert str(bitcoin) == '2 BTC'
dollar = conversion.convert(bitcoin)
assert str(dollar) == '200 kUSD'
conversion = UnitConversion('F', 'C', 1.8, 32)
assert str(conversion) == 'F = 1.8*C + 32'
result = conversion.convert(0, 'C', 'F')
assert str(result) == '32 F'
result = conversion.convert(32, to_units='C')
assert str(result) == '0 C'
result = conversion.convert(32, from_units='F')
assert str(result) == '0 C'
with pytest.raises(KeyError) as exception:
result = conversion.convert(0, from_units='X', to_units='X')
assert str(exception.value) == "unable to convert to 'X'."
assert isinstance(exception.value, UnknownConversion)
assert isinstance(exception.value, QuantiPhyError)
assert isinstance(exception.value, KeyError)
assert exception.value.args == ('X',)
result = conversion.convert(0, to_units='X')
assert str(result) == '32 F'
with pytest.raises(KeyError) as exception:
result = conversion.convert(0, from_units='X')
assert str(exception.value) == "unable to convert from 'X'."
assert isinstance(exception.value, UnknownConversion)
assert isinstance(exception.value, QuantiPhyError)
assert isinstance(exception.value, KeyError)
assert exception.value.args == ('X',)
def test_func():
Quantity.reset_prefs()
def from_dB(value):
return 10**(value/20)
def to_dB(value):
return 20*math.log10(value)
vconverter = UnitConversion('V', 'dBV', from_dB, to_dB)
assert str(vconverter) == 'V = from_dB(dBV), dBV = to_dB(V)'
assert str(vconverter.convert(Quantity('100mV'))) == '-20 dBV'
assert str(vconverter.convert(Quantity('-20dBV'))) == '100 mV'
aconverter = UnitConversion('A', 'dBA', from_dB, to_dB)
assert str(aconverter) == 'A = from_dB(dBA), dBA = to_dB(A)'
assert str(aconverter.convert(Quantity('100mA'))) == '-20 dBA'
assert str(aconverter.convert(Quantity('-20dBA'))) == '100 mA'
assert '{:pdBV}'.format(Quantity('100mV')) == '-20 dBV'
assert '{:pdBV}'.format(Quantity('10V')) == '20 dBV'
assert '{:pV}'.format(Quantity('-20 dBV')) == '0.1 V'
assert '{:pV}'.format(Quantity('20 dBV')) == '10 V'
assert '{:pdBA}'.format(Quantity('100mA')) == '-20 dBA'
assert '{:pdBA}'.format(Quantity('10A')) == '20 dBA'
assert '{:pA}'.format(Quantity('-20 dBA')) == '0.1 A'
assert '{:pA}'.format(Quantity('20 dBA')) == '10 A'
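
    # (the 'p<units>' format codes above work because each UnitConversion
    # registers itself when created; formatting a quantity to other units
    # then goes through the registered conversion)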
if __name__ == '__main__':
    # As a debugging aid, allow the tests to be run on their own, outside pytest.
    # This makes it easier to see and interpret the textual output.
defined = dict(globals())
for k, v in defined.items():
if callable(v) and k.startswith('test_'):
print()
print('Calling:', k)
print((len(k)+9)*'=')
v()
| gpl-3.0 | -8,724,361,906,445,110,000 | 33.464103 | 85 | 0.561119 | false |
quantumlib/OpenFermion | src/openfermion/transforms/repconversions/operator_tapering_test.py | 1 | 1640 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""tests for operator_tapering.py"""
import unittest
from openfermion.ops.operators import FermionOperator, BosonOperator
from openfermion.transforms.repconversions.operator_tapering import (
freeze_orbitals, prune_unused_indices)
class FreezeOrbitalsTest(unittest.TestCase):
def test_freeze_orbitals_nonvanishing(self):
op = FermionOperator(((1, 1), (1, 0), (0, 1), (2, 0)))
op_frozen = freeze_orbitals(op, [1])
expected = FermionOperator(((0, 1), (1, 0)), -1)
self.assertEqual(op_frozen, expected)
def test_freeze_orbitals_vanishing(self):
op = FermionOperator(((1, 1), (2, 0)))
op_frozen = freeze_orbitals(op, [], [2])
self.assertEqual(len(op_frozen.terms), 0)
class PruneUnusedIndicesTest(unittest.TestCase):
def test_prune(self):
for LadderOp in (FermionOperator, BosonOperator):
op = LadderOp(((1, 1), (8, 1), (3, 0)), 0.5)
op = prune_unused_indices(op)
expected = LadderOp(((0, 1), (2, 1), (1, 0)), 0.5)
self.assertTrue(expected == op) | apache-2.0 | 7,250,514,672,289,180,000 | 39.02439 | 76 | 0.667683 | false |
gooftroop/Zeus | contrib/tornado/test/tcpserver_test.py | 105 | 1278 | import socket
from tornado import gen
from tornado.iostream import IOStream
from tornado.log import app_log
from tornado.stack_context import NullContext
from tornado.tcpserver import TCPServer
from tornado.testing import AsyncTestCase, ExpectLog, bind_unused_port, gen_test
class TCPServerTest(AsyncTestCase):
@gen_test
def test_handle_stream_coroutine_logging(self):
# handle_stream may be a coroutine and any exception in its
# Future will be logged.
class TestServer(TCPServer):
@gen.coroutine
def handle_stream(self, stream, address):
yield gen.moment
stream.close()
                1 / 0  # deliberately raise ZeroDivisionError; the logged exception is asserted below
server = client = None
try:
sock, port = bind_unused_port()
with NullContext():
server = TestServer()
server.add_socket(sock)
client = IOStream(socket.socket())
with ExpectLog(app_log, "Exception in callback"):
yield client.connect(('localhost', port))
yield client.read_until_close()
yield gen.moment
finally:
if server is not None:
server.stop()
if client is not None:
client.close()
| mit | -8,315,910,516,919,921,000 | 32.631579 | 80 | 0.592332 | false |
yqm/sl4a | python/src/Lib/commands.py | 58 | 2540 | """Execute shell commands via os.popen() and return status, output.
Interface summary:
import commands
outtext = commands.getoutput(cmd)
(exitstatus, outtext) = commands.getstatusoutput(cmd)
outtext = commands.getstatus(file) # returns output of "ls -ld file"
A trailing newline is removed from the output string.
Encapsulates the basic operation:
pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
text = pipe.read()
sts = pipe.close()
[Note: it would be nice to add functions to interpret the exit status.]
"""
__all__ = ["getstatusoutput","getoutput","getstatus"]
# Module 'commands'
#
# Various tools for executing commands and looking at their output and status.
#
# NB This only works (and is only relevant) for UNIX.
# Get 'ls -l' status for an object into a string
#
def getstatus(file):
"""Return output of "ls -ld <file>" in a string."""
import warnings
warnings.warn("commands.getstatus() is deprecated", DeprecationWarning)
return getoutput('ls -ld' + mkarg(file))
# Get the output from a shell command into a string.
# The exit status is ignored; a trailing newline is stripped.
# Assume the command will work with '{ ... ; } 2>&1' around it..
#
def getoutput(cmd):
"""Return output (stdout or stderr) of executing cmd in a shell."""
return getstatusoutput(cmd)[1]
# Ditto but preserving the exit status.
# Returns a pair (sts, output)
#
def getstatusoutput(cmd):
"""Return (status, output) of executing cmd in a shell."""
import os
pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
text = pipe.read()
sts = pipe.close()
if sts is None: sts = 0
if text[-1:] == '\n': text = text[:-1]
return sts, text
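
# A minimal usage sketch (assumes a POSIX shell; the output shown is
# illustrative):
#
#     >>> import commands
#     >>> commands.getstatusoutput('echo hello')
#     (0, 'hello')
#     >>> commands.getoutput('echo hello')
#     'hello'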
# Make command argument from directory and pathname (prefix space, add quotes).
#
def mk2arg(head, x):
from warnings import warnpy3k
warnpy3k("In 3.x, mk2arg has been removed.")
import os
return mkarg(os.path.join(head, x))
# Make a shell command argument from a string.
# Return a string beginning with a space followed by a shell-quoted
# version of the argument.
# Two strategies: enclose in single quotes if it contains none;
# otherwise, enclose in double quotes and prefix quotable characters
# with backslash.
#
def mkarg(x):
from warnings import warnpy3k
warnpy3k("in 3.x, mkarg has been removed.")
if '\'' not in x:
return ' \'' + x + '\''
s = ' "'
for c in x:
if c in '\\$"`':
s = s + '\\'
s = s + c
s = s + '"'
return s
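
# Quoting sketch (illustrative): mkarg("file name") returns " 'file name'"
# (single-quoted, with a leading space); arguments containing a single quote
# are double-quoted instead, with \, $, " and ` escaped by a backslash.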
| apache-2.0 | -7,031,704,543,468,686,000 | 27.222222 | 79 | 0.643307 | false |
scipsycho/mlpack | linear_regression.py | 1 | 8215 | import numpy as np
class linear_regression:
def __init__(self,batch_size=0,epochs=100,learning_rate=0.001,tolerance=0.00001,show_progress=True):
"""
        The function initializes the class.
Parameters
----------
batch_size: int
                    It defines the number of data sets the algorithm takes at once to optimise the parameters.
                    It should be a factor of the number of examples given to the algorithm in the fit function.
Default Value is 0 which means it will compute all the data sets together.
epochs: int
It is the maximum number of times the algorithm is going to compute the whole data set available
for training.
Default Value is 100
learning_rate: float
It is the learning rate of the machine learning algorithm.
Default Value is 0.001
tolerance: float
                   It defines the minimum improvement that the algorithm will tolerate, i.e. if the parameters show a change
                   less than the value of tolerance, it assumes that the algorithm is optimised to the maximum.
Default Value is 0.00001
show_progress: Boolean
It controls whether the object will show the progress as output or not.
Default Value: True
Returns
-------
Nothing
"""
#Batch Size
self.batch=batch_size
#Maximum number of iterations that the object will perfom
self.epochs=epochs
#Learning Rate of the linear regression algo
self.l_rate=learning_rate
        # Bool value of whether to show progress or not
self.show_progress=show_progress
#Maximum change in parameters or weights that can be assumed negligible
self.tol=tolerance
def fit(self,X,Y):
"""
The function fits the training data set to the algorithm.
Detailed Description
--------------------
        The function takes the input and the actual output of the data set and optimises the parameters accordingly.
Parameters
----------
X: numpy.ndarray
           It is the input data set. The number of columns defines the number of dimensions in the input data.
           The number of rows defines the number of data sets available for training.
If there is only one dimension, it can also be a linear numpy.ndarray.
Y: numpy.ndarray
It is the proposed output corresponding to the input given in any row of the input data set X.
           The number of rows defines the number of data sets available for training.
It can also be a linear numpy.ndarray.
Returns
-------
Nothing
Notes
-----
        X.shape[0] must be equal to Y.shape[0], which is also the number of data sets available for training.
"""
#Number of Training Examples
self.note=X.shape[0]
#If Batch value is zero, it is assumed the whole dataset is the batch
        if self.batch == 0:
self.batch=self.note
#Changing Vector To Mat
        if len(X.shape) == 1:
X=X.reshape([X.shape[0],1])
        # Number of dimensions plus one bias introduced
self.nod=X.shape[1]+1
#Training data initialized
self.train_i=np.ones([self.note,self.nod])
#Leaving Bias values as 1
self.train_i[:,1:]=X
#Training data output stored and changing Vector To Matrix
        if len(Y.shape) == 1:
Y=Y.reshape([Y.shape[0],1])
self.train_o=Y
#Parameters or weights randomly generated
self.parameters=np.random.random([self.nod,1])
#Starting Gradient Descent
self.__start_gradient_descent__()
def __GradDescent__(self,initial,final):
"""
        The function optimises the parameters according to a specific subset of the data set available.
Parameters
----------
initial: int
It is the inital index of block of the data set being used.
final: int
It is the final index of block of the data set being used.
Returns
-------
Nothing
Notes
-----
initial should always be less than or equal to final. Also, final should always be less than the
        number of data sets available.
"""
#Difference between expected and actual values
diff=(self.train_i[initial:final].dot(self.parameters)-self.train_o[initial:final])
#Multiplying with respected values to get differentiation
product=diff*self.train_i[initial:final]
#Adding column-wise to get differentitation w.r.t. parameters
delta=(product.sum(axis=0))*self.l_rate/(final-initial+1)
#Changing the Value Of parameters
self.parameters=self.parameters-delta.reshape([delta.shape[0],1])
def __start_gradient_descent__(self):
"""
This function optimises the parameters for the whole data set.
Detailed Description
--------------------
        This function uses the batch size, number of epochs, and tolerance to find the optimised values of the
        parameters according to the needs of the user. The function also shows progress in terms of the epochs
        covered; the progress display does not take the tolerance value into account.
Parameters
----------
None
Returns
-------
None
"""
        # Number of times the whole set of parameters is optimized in one epoch
times=int(self.note/self.batch)
#Value used to show percentage
percent=1
        # Loss curve is initialized every time this function is called
self.loss_curve=[]
#Gradient Desecent Started
for i in range(self.epochs):
#Initial Parameters Stored
self.initial_parameters=self.parameters
for j in range(times):
initial=j*self.batch
final=(j+1)*self.batch
self.__GradDescent__(initial,final)
#One Iteration of Gradient Descent Complete
#Finding and adding loss to the loss curve
diff=(self.train_i.dot(self.parameters)-self.train_o)
loss=(np.abs(diff)).sum()
self.loss_curve.append(loss)
#Checking for tolerance
if (np.abs(self.initial_parameters-self.parameters)).sum()/self.note < self.tol:
                print('Optimised to the maximum')
break
#For showing percentage
            if self.show_progress and (i * 100 / self.epochs >= percent):
print('|',end='')
percent+=1
        # Completing the percentage if the loop is broken in between
while percent<=101 and self.show_progress:
print('|',end='')
percent+=1
#Displaying 100% Complete
if self.show_progress:
print(" 100%")
def predict(self,Y):
"""
This function gives the predicted value of the data set given for testing.
Parameters
----------
Y: numpy.ndarray
This is the input of the linear regression model whose number of columns represent
the number of dimensions of the input. The rows represent the number of data sets given
for prediction.
Returns
        -------
numpy.ndarray
            This is the predicted output of the input given in Y. Its number of rows equals
            the number of data sets given for prediction.
Notes
-----
Y.shape[1] should be equal to the number of dimensions given in the fit function.
"""
#Converting the testing data into data with bias
self.test_i=np.ones([Y.shape[0],self.nod])
        if len(Y.shape) == 1:
Y=Y.reshape([Y.shape[0],1])
self.test_i[:,1:]=Y
#Storing Output
self.test_o=self.test_i.dot(self.parameters)
return self.test_o
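
# A minimal usage sketch (hypothetical data, not part of the class above):
#
#     import numpy as np
#
#     X = np.random.random([100, 3])                    # 100 examples, 3 features
#     Y = X.dot(np.array([[1.0], [2.0], [3.0]])) + 5.0  # known linear rule plus bias
#
#     model = linear_regression(batch_size=10, epochs=200,
#                               learning_rate=0.01, show_progress=False)
#     model.fit(X, Y)
#     Y_pred = model.predict(X)                         # shape (100, 1)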
| mit | -2,167,330,526,561,842,700 | 30.841085 | 124 | 0.589653 | false |
rabernat/mitgcm-xray | _xgcm/mdsxray.py | 2 | 23927 | import operator
from glob import glob
import os
import re
import warnings
import numpy as np
import dask.array as da
import xray
from xray import Variable
from xray.backends.common import AbstractDataStore
from xray.core.utils import NDArrayMixin
from xray.core.pycompat import OrderedDict
from xray.core.indexing import NumpyIndexingAdapter
#from ..conventions import pop_to, cf_encoder
#from ..core import indexing
#from ..core.utils import (FrozenOrderedDict, NDArrayMixin,
# close_on_error, is_remote_uri)
#from ..core.pycompat import iteritems, basestring, OrderedDict
#from .common import AbstractWritableDataStore, robust_getitem
# This lookup table maps from dtype.byteorder to a readable endian
# string used by netCDF4.
_endian_lookup = {'=': 'native',
'>': 'big',
'<': 'little',
'|': 'native'}
# the variable metadata will be stored in dicts of the form
#_variable[varname] = (dimensions, description, units)
_grid_variables = OrderedDict(
# horizontal grid
X= (('X',), "X-coordinate of cell center", "meters"),
Y= (('Y',), "Y-coordinate of cell center", "meters"),
Xp1= (('Xp1',), "X-coordinate of cell corner", "meters"),
Yp1= (('Yp1',), "Y-coordinate of cell corner", "meters"),
# 2d versions
XC= (('Y','X'), "X coordinate of cell center (T-P point)", "degree_east"),
YC= (('Y','X'), "Y coordinate of cell center (T-P point)", "degree_north"),
XG= (('Yp1','Xp1'), "X coordinate of cell corner (Vorticity point)", "degree_east"),
YG= (('Yp1','Xp1'), "Y coordinate of cell corner (Vorticity point)", "degree_north"),
# vertical grid
Z= (('Z',), "vertical coordinate of cell center", "meters"),
Zp1= (('Zp1',), "vertical coordinate of cell interface", "meters"),
Zu= (('Zu',), "vertical coordinate of lower cell interface", "meters"),
Zl= (('Zl',), "vertical coordinate of upper cell interface", "meters"),
# (for some reason, the netCDF files use both R and Z notation )
# 'RC': (('Z',), "R coordinate of cell center", "m"),
# 'RF': (('Zp1',), "R coordinate of cell interface", "m"),
# 'RU': (('Zu',), "R coordinate of lower cell interface", "m"),
# 'RL': (('Zl',), "R coordinate of upper cell interface", "m"),
# horiz. differentials
dxC= (('Y','Xp1'), "x cell center separation", "meters"),
dyC= (('Yp1','X'), "y cell center separation", "meters"),
dxG= (('Yp1','X'), "x cell corner separation", "meters"),
dyG= (('Y','Xp1'), "y cell corner separation", "meters"),
# vert. differentials
drC= (('Zp1',), "r cell center separation", "m"),
drF= (('Z',), "r cell face separation", "m"),
# areas
rA= (('Y','X'), "r-face area at cell center", "m^2"),
rAw= (('Y','Xp1'), "r-face area at U point", "m^2"),
rAs= (('Yp1','X'), "r-face area at V point", "m^2"),
rAz= (('Yp1','Xp1'), "r-face area at cell corner", "m^2"),
# depth
Depth=(('Y','X'), "fluid thickness in r coordinates (at rest)", "meters"),
# thickness factors
HFacC=(('Z','Y','X'),
"vertical fraction of open cell at cell center", "none (0-1)"),
HFacW=(('Z','Y','Xp1'),
"vertical fraction of open cell at West face", "none (0-1)"),
HFacS=(('Z','Yp1','X'),
"vertical fraction of open cell at South face", "none (0-1)")
)
_grid_special_mapping = {
'Z': ('RC', (slice(None),0,0)),
'Zp1': ('RF', (slice(None),0,0)),
'Zu': ('RF', (slice(1,None),0,0)),
'Zl': ('RF', (slice(None,-1),0,0)),
'X': ('XC', (0,slice(None))),
'Y': ('YC', (slice(None),0)),
'Xp1': ('XG', (0,slice(None))),
'Yp1': ('YG', (slice(None),0)),
'rA': ('RAC', None),
'HFacC': ('hFacC', None),
'HFacW': ('hFacW', None),
'HFacS': ('hFacS', None),
}
_state_variables = OrderedDict(
# state
U= (('Z','Y','Xp1'), 'Zonal Component of Velocity', 'm/s'),
V= (('Z','Yp1','X'), 'Meridional Component of Velocity', 'm/s'),
W= (('Zl','Y','X'), 'Vertical Component of Velocity', 'm/s'),
T= (('Z','Y','X'), 'Potential Temperature', 'degC'),
S= (('Z','Y','X'), 'Salinity', 'psu'),
PH= (('Z','Y','X'), 'Hydrostatic Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
PHL=(('Y','X'), 'Bottom Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
Eta=(('Y','X'), 'Surface Height Anomaly', 'm'),
# tave
uVeltave=(('Z','Y','Xp1'), 'Zonal Component of Velocity', 'm/s'),
vVeltave=(('Z','Yp1','X'), 'Meridional Component of Velocity', 'm/s'),
wVeltave=(('Zl','Y','X'), 'Vertical Component of Velocity', 'm/s'),
Ttave=(('Z','Y','X'), 'Potential Temperature', 'degC'),
Stave=(('Z','Y','X'), 'Salinity', 'psu'),
PhHytave=(('Z','Y','X'), 'Hydrostatic Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
PHLtave=(('Y','X'), 'Bottom Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
ETAtave=(('Y','X'), 'Surface Height Anomaly', 'm'),
Convtave=(('Zl','Y','X'), "Convective Adjustment Index", "none [0-1]"),
Eta2tave=(('Y','X'), "Square of Surface Height Anomaly", "m^2"),
PHL2tave=(('Y','X'), 'Square of Hyd. Pressure Pot.(p/rho) Anomaly', 'm^4/s^4'),
sFluxtave=(('Y','X'), 'total salt flux (match salt-content variations), >0 increases salt', 'g/m^2/s'),
Tdiftave=(('Zl','Y','X'), "Vertical Diffusive Flux of Pot.Temperature", "degC.m^3/s"),
tFluxtave=(('Y','X'), "Total heat flux (match heat-content variations), >0 increases theta", "W/m^2"),
TTtave=(('Z','Y','X'), 'Squared Potential Temperature', 'degC^2'),
uFluxtave=(('Y','Xp1'), 'surface zonal momentum flux, positive -> increase u', 'N/m^2'),
UStave=(('Z','Y','Xp1'), "Zonal Transport of Salinity", "psu m/s"),
UTtave=(('Z','Y','Xp1'), "Zonal Transport of Potenial Temperature", "degC m/s"),
UUtave=(('Z','Y','Xp1'), "Zonal Transport of Zonal Momentum", "m^2/s^2"),
UVtave=(('Z','Yp1','Xp1'), 'Product of meridional and zonal velocity', 'm^2/s^2'),
vFluxtave=(('Yp1','X'), 'surface meridional momentum flux, positive -> increase v', 'N/m^2'),
VStave=(('Z','Yp1','X'), "Meridional Transport of Salinity", "psu m/s"),
VTtave=(('Z','Yp1','X'), "Meridional Transport of Potential Temperature", "degC m/s"),
VVtave=(('Z','Yp1','X'), 'Zonal Transport of Zonal Momentum', 'm^2/s^2'),
WStave=(('Zl','Y','X'), 'Vertical Transport of Salinity', "psu m/s"),
WTtave=(('Zl','Y','X'), 'Vertical Transport of Potential Temperature', "degC m/s")
)
def _force_native_endianness(var):
# possible values for byteorder are:
# = native
# < little-endian
# > big-endian
# | not applicable
# Below we check if the data type is not native or NA
if var.dtype.byteorder not in ['=', '|']:
# if endianness is specified explicitly, convert to the native type
data = var.data.astype(var.dtype.newbyteorder('='))
var = Variable(var.dims, data, var.attrs, var.encoding)
# if endian exists, remove it from the encoding.
var.encoding.pop('endian', None)
# check to see if encoding has a value for endian its 'native'
if not var.encoding.get('endian', 'native') is 'native':
raise NotImplementedError("Attempt to write non-native endian type, "
"this is not supported by the netCDF4 python "
"library.")
return var
def _parse_available_diagnostics(fname):
all_diags = {}
# add default diagnostics for grid, tave, and state
with open(fname) as f:
# will automatically skip first four header lines
for l in f:
c = re.split('\|',l)
if len(c)==7 and c[0].strip()!='Num':
key = c[1].strip()
levs = int(c[2].strip())
mate = c[3].strip()
if mate: mate = int(mate)
code = c[4]
units = c[5].strip()
desc = c[6].strip()
dds = MITgcmDiagnosticDescription(
key, code, units, desc, levs, mate)
# return dimensions, description, units
all_diags[key] = (dds.coords(), dds.desc, dds.units)
return all_diags
class MITgcmDiagnosticDescription(object):
def __init__(self, key, code, units=None, desc=None, levs=None, mate=None):
self.key = key
self.levs = levs
self.mate = mate
self.code = code
self.units = units
self.desc = desc
def coords(self):
"""Parse code to determine coordinates."""
hpoint = self.code[1]
rpoint = self.code[8]
rlev = self.code[9]
xcoords = {'U': 'Xp1', 'V': 'X', 'M': 'X', 'Z': 'Xp1'}
ycoords = {'U': 'Y', 'V': 'Yp1', 'M': 'Y', 'Z': 'Yp1'}
rcoords = {'M': 'Z', 'U': 'Zu', 'L': 'Zl'}
if rlev=='1' and self.levs==1:
return (ycoords[hpoint], xcoords[hpoint])
elif rlev=='R':
return (rcoords[rpoint], ycoords[hpoint], xcoords[hpoint])
else:
warnings.warn("Not sure what to do with rlev = " + rlev)
return (rcoords[rpoint], ycoords[hpoint], xcoords[hpoint])
def _parse_meta(fname):
"""Get the metadata as a dict out of the mitGCM mds .meta file."""
flds = {}
basename = re.match('(^.+?)\..+', os.path.basename(fname)).groups()[0]
flds['basename'] = basename
with open(fname) as f:
text = f.read()
# split into items
for item in re.split(';', text):
# remove whitespace at beginning
item = re.sub('^\s+', '', item)
#match = re.match('(\w+) = ', item)
match = re.match('(\w+) = (\[|\{)(.*)(\]|\})', item, re.DOTALL)
if match:
key, _, value, _ = match.groups()
# remove more whitespace
value = re.sub('^\s+', '', value)
value = re.sub('\s+$', '', value)
#print key,':', value
flds[key] = value
# now check the needed things are there
needed_keys = ['dimList','nDims','nrecords','dataprec']
for k in needed_keys:
assert flds.has_key(k)
# transform datatypes
flds['nDims'] = int(flds['nDims'])
flds['nrecords'] = int(flds['nrecords'])
# use big endian always
flds['dataprec'] = np.dtype(re.sub("'",'',flds['dataprec'])).newbyteorder('>')
flds['dimList'] = [[int(h) for h in
re.split(',', g)] for g in
re.split(',\n',flds['dimList'])]
if flds.has_key('fldList'):
flds['fldList'] = [re.match("'*(\w+)",g).groups()[0] for g in
re.split("'\s+'",flds['fldList'])]
assert flds['nrecords'] == len(flds['fldList'])
return flds
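
# A .meta file is plain text made of "key = value;" items; a sketch of the
# parts this parser relies on (values are illustrative):
#
#    nDims = [   3 ];
#    dimList = [
#       90,    1,   90,
#       40,    1,   40,
#       15,    1,   15
#    ];
#    dataprec = [ 'float32' ];
#    nrecords = [     1 ];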
def _read_mds(fname, iternum=None, use_mmap=True,
force_dict=True, convert_big_endian=False):
"""Read an MITgcm .meta / .data file pair"""
if iternum is None:
istr = ''
else:
assert isinstance(iternum, int)
istr = '.%010d' % iternum
datafile = fname + istr + '.data'
metafile = fname + istr + '.meta'
# get metadata
meta = _parse_meta(metafile)
# why does the .meta file contain so much repeated info?
# just get the part we need
# and reverse order (numpy uses C order, mds is fortran)
shape = [g[0] for g in meta['dimList']][::-1]
assert len(shape) == meta['nDims']
# now add an extra for number of recs
nrecs = meta['nrecords']
shape.insert(0, nrecs)
# load and shape data
if use_mmap:
d = np.memmap(datafile, meta['dataprec'], 'r')
else:
d = np.fromfile(datafile, meta['dataprec'])
if convert_big_endian:
dtnew = d.dtype.newbyteorder('=')
d = d.astype(dtnew)
d.shape = shape
if nrecs == 1:
if meta.has_key('fldList'):
name = meta['fldList'][0]
else:
name = meta['basename']
if force_dict:
return {name: d[0]}
else:
return d[0]
else:
# need record names
out = {}
for n, name in enumerate(meta['fldList']):
out[name] = d[n]
return out
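
# Usage sketch (file names and iteration numbers are hypothetical):
#   _read_mds('run/T', iternum=100) reads 'run/T.0000000100.meta' /
#   'run/T.0000000100.data' and returns {'T': <array>}, one entry per
#   record in the .meta 'fldList' (or keyed by the basename when the
#   file holds a single unnamed record).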
class MDSArrayWrapper(NDArrayMixin):
def __init__(self, array):
self.array = array
@property
def dtype(self):
        return self.array.dtype
def _list_all_mds_files(dirname):
"""Find all the meta / data files"""
files = glob(os.path.join(dirname, '*.meta'))
# strip the suffix
return [f[:-5] for f in files]
#class MemmapArrayWrapper(NumpyIndexingAdapter):
class MemmapArrayWrapper(NDArrayMixin):
def __init__(self, memmap_array):
self._memmap_array = memmap_array
@property
def array(self):
# We can't store the actual netcdf_variable object or its data array,
# because otherwise scipy complains about variables or files still
# referencing mmapped arrays when we try to close datasets without
# having read all data in the file.
return self._memmap_array
@property
def dtype(self):
return self._memmap_array.dtype
def __getitem__(self, key):
data = self._memmap_array.__getitem__(key)
return np.asarray(data)
_valid_geometry = ['Cartesian', 'SphericalPolar']
def open_mdsdataset(dirname, iters=None, deltaT=1,
prefix=None, ref_date=None, calendar=None,
ignore_pickup=True, geometry='Cartesian'):
"""Open MITgcm-style mds file output as xray datset."""
store = _MDSDataStore(dirname, iters, deltaT,
prefix, ref_date, calendar,
ignore_pickup, geometry)
return xray.Dataset.load_store(store)
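
# Usage sketch (directory layout and iteration numbers are hypothetical):
#
#     ds = open_mdsdataset('run/', iters=[100, 200], deltaT=3600,
#                          prefix=['T', 'S'], geometry='Cartesian')
#
# The directory is expected to hold the grid files (XC, YC, RC, ...), an
# 'available_diagnostics.log', and one .meta/.data pair per prefix and
# iteration.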
class _MDSDataStore(AbstractDataStore):
"""Represents the entire directory of MITgcm mds output
including all grid variables. Similar in some ways to
netCDF.Dataset."""
def __init__(self, dirname, iters=None, deltaT=1,
prefix=None, ref_date=None, calendar=None,
ignore_pickup=True, geometry='Cartesian'):
"""iters: list of iteration numbers
deltaT: timestep
prefix: list of file prefixes (if None use all)
"""
assert geometry in _valid_geometry
self.geometry = geometry
# the directory where the files live
self.dirname = dirname
# storage dicts for variables and attributes
self._variables = OrderedDict()
self._attributes = OrderedDict()
self._dimensions = []
### figure out the mapping between diagnostics names and variable properties
# all possible diagnostics
diag_meta = _parse_available_diagnostics(
os.path.join(dirname, 'available_diagnostics.log'))
### read grid files
for k in _grid_variables:
if _grid_special_mapping.has_key(k):
fname = _grid_special_mapping[k][0]
sl = _grid_special_mapping[k][1]
else:
fname = k
sl = None
data = None
try:
data = _read_mds(os.path.join(dirname, fname), force_dict=False)
except IOError:
try:
data = _read_mds(os.path.join(dirname, fname.upper()),
force_dict=False)
except IOError:
warnings.warn("Couldn't load grid variable " + k)
if data is not None:
data = data[sl] if sl is not None else data.squeeze()
dims, desc, units = _grid_variables[k]
self._variables[k] = Variable(
dims, MemmapArrayWrapper(data), {'description': desc, 'units': units})
self._dimensions.append(k)
# now get variables from our iters
if iters is not None:
# create iteration array
iterdata = np.asarray(iters)
self._variables['iter'] = Variable(('time',), iterdata,
{'description': 'model timestep number'})
# create time array
timedata = np.asarray(iters)*deltaT
time_attrs = {'description': 'model time'}
if ref_date is not None:
time_attrs['units'] = 'seconds since %s' % ref_date
else:
time_attrs['units'] = 'seconds'
if calendar is not None:
time_attrs['calendar'] = calendar
self._variables['time'] = Variable(
('time',), timedata, time_attrs)
self._dimensions.append('time')
varnames = []
fnames = []
_data_vars = OrderedDict()
# look at first iter to get variable metadata
for f in glob(os.path.join(dirname, '*.%010d.meta' % iters[0])):
if ignore_pickup and re.search('pickup', f):
pass
else:
go = True
if prefix is not None:
bname = os.path.basename(f[:-16])
matches = [bname==p for p in prefix]
if not any(matches):
go = False
if go:
meta = _parse_meta(f)
if meta.has_key('fldList'):
flds = meta['fldList']
[varnames.append(fl) for fl in flds]
else:
varnames.append(meta['basename'])
fnames.append(os.path.join(dirname,meta['basename']))
# read data as dask arrays (should be an option)
vardata = {}
for k in varnames:
vardata[k] = []
for i in iters:
for f in fnames:
try:
data = _read_mds(f, i, force_dict=True)
for k in data.keys():
mwrap = MemmapArrayWrapper(data[k])
vardata[k].append(
da.from_array(mwrap, mwrap.shape))
except IOError:
# couldn't find the variable, remove it from the list
#print 'Removing %s from list (iter %g)' % (k, i)
varnames.remove(k)
# final loop to create Variable objects
for k in varnames:
try:
dims, desc, units = _state_variables[k]
except KeyError:
dims, desc, units = diag_meta[k]
            # check for shape compatibility
varshape = vardata[k][0].shape
varndims = len(varshape)
if len(dims) != varndims:
warnings.warn("Shape of variable data is not compatible "
"with expected number of dimensions. This "
"can arise if the 'levels' option is used "
"in data.diagnostics. Right now we have no "
"way to infer the level, so the variable is "
"skipped: " + k)
else:
# add time to dimension
dims_time = ('time',) + dims
# wrap variable in dask array
vardask = da.stack([da.from_array(d, varshape) for d in vardata[k]])
self._variables[k] = Variable( dims_time, vardask,
{'description': desc, 'units': units})
self._attributes = {'history': 'Some made up attribute'}
def get_variables(self):
return self._variables
def get_attrs(self):
return self._attributes
def get_dimensions(self):
return self._dimensions
def close(self):
pass
# from MITgcm netCDF grid file
# dimensions:
# Z = 30 ;
# Zp1 = 31 ;
# Zu = 30 ;
# Zl = 30 ;
# X = 25 ;
# Y = 40 ;
# Xp1 = 26 ;
# Yp1 = 41 ;
# variables:
# double Z(Z) ;
# Z:long_name = "vertical coordinate of cell center" ;
# Z:units = "meters" ;
# Z:positive = "up" ;
# double RC(Z) ;
# RC:description = "R coordinate of cell center" ;
# RC:units = "m" ;
# double Zp1(Zp1) ;
# Zp1:long_name = "vertical coordinate of cell interface" ;
# Zp1:units = "meters" ;
# Zp1:positive = "up" ;
# double RF(Zp1) ;
# RF:description = "R coordinate of cell interface" ;
# RF:units = "m" ;
# double Zu(Zu) ;
# Zu:long_name = "vertical coordinate of lower cell interface" ;
# Zu:units = "meters" ;
# Zu:positive = "up" ;
# double RU(Zu) ;
# RU:description = "R coordinate of upper interface" ;
# RU:units = "m" ;
# double Zl(Zl) ;
# Zl:long_name = "vertical coordinate of upper cell interface" ;
# Zl:units = "meters" ;
# Zl:positive = "up" ;
# double RL(Zl) ;
# RL:description = "R coordinate of lower interface" ;
# RL:units = "m" ;
# double drC(Zp1) ;
# drC:description = "r cell center separation" ;
# double drF(Z) ;
# drF:description = "r cell face separation" ;
# double X(X) ;
# X:long_name = "X-coordinate of cell center" ;
# X:units = "meters" ;
# double Y(Y) ;
# Y:long_name = "Y-Coordinate of cell center" ;
# Y:units = "meters" ;
# double XC(Y, X) ;
# XC:description = "X coordinate of cell center (T-P point)" ;
# XC:units = "degree_east" ;
# double YC(Y, X) ;
# YC:description = "Y coordinate of cell center (T-P point)" ;
# YC:units = "degree_north" ;
# double Xp1(Xp1) ;
# Xp1:long_name = "X-Coordinate of cell corner" ;
# Xp1:units = "meters" ;
# double Yp1(Yp1) ;
# Yp1:long_name = "Y-Coordinate of cell corner" ;
# Yp1:units = "meters" ;
# double XG(Yp1, Xp1) ;
# XG:description = "X coordinate of cell corner (Vorticity point)" ;
# XG:units = "degree_east" ;
# double YG(Yp1, Xp1) ;
# YG:description = "Y coordinate of cell corner (Vorticity point)" ;
# YG:units = "degree_north" ;
# double dxC(Y, Xp1) ;
# dxC:description = "x cell center separation" ;
# double dyC(Yp1, X) ;
# dyC:description = "y cell center separation" ;
# double dxF(Y, X) ;
# dxF:description = "x cell face separation" ;
# double dyF(Y, X) ;
# dyF:description = "y cell face separation" ;
# double dxG(Yp1, X) ;
# dxG:description = "x cell corner separation" ;
# double dyG(Y, Xp1) ;
# dyG:description = "y cell corner separation" ;
# double dxV(Yp1, Xp1) ;
# dxV:description = "x v-velocity separation" ;
# double dyU(Yp1, Xp1) ;
# dyU:description = "y u-velocity separation" ;
# double rA(Y, X) ;
# rA:description = "r-face area at cell center" ;
# double rAw(Y, Xp1) ;
# rAw:description = "r-face area at U point" ;
# double rAs(Yp1, X) ;
# rAs:description = "r-face area at V point" ;
# double rAz(Yp1, Xp1) ;
# rAz:description = "r-face area at cell corner" ;
# double fCori(Y, X) ;
# fCori:description = "Coriolis f at cell center" ;
# double fCoriG(Yp1, Xp1) ;
# fCoriG:description = "Coriolis f at cell corner" ;
# double R_low(Y, X) ;
# R_low:description = "base of fluid in r-units" ;
# double Ro_surf(Y, X) ;
# Ro_surf:description = "surface reference (at rest) position" ;
# double Depth(Y, X) ;
# Depth:description = "fluid thickness in r coordinates (at rest)" ;
# double HFacC(Z, Y, X) ;
# HFacC:description = "vertical fraction of open cell at cell center" ;
# double HFacW(Z, Y, Xp1) ;
# HFacW:description = "vertical fraction of open cell at West face" ;
# double HFacS(Z, Yp1, X) ;
# HFacS:description = "vertical fraction of open cell at South face" ; | mit | 1,379,219,783,563,502,600 | 38.747508 | 107 | 0.539474 | false |
LxMLS/lxmls-toolkit | lxmls/deep_learning/numpy_models/log_linear.py | 1 | 1802 | import numpy as np
from lxmls.deep_learning.utils import (
Model,
glorot_weight_init,
index2onehot,
logsumexp
)
class NumpyLogLinear(Model):
def __init__(self, **config):
# Initialize parameters
weight_shape = (config['input_size'], config['num_classes'])
# after Xavier Glorot et al
self.weight = glorot_weight_init(weight_shape, 'softmax')
self.bias = np.zeros((1, config['num_classes']))
self.learning_rate = config['learning_rate']
def log_forward(self, input=None):
"""Forward pass of the computation graph"""
# Linear transformation
z = np.dot(input, self.weight.T) + self.bias
# Softmax implemented in log domain
log_tilde_z = z - logsumexp(z, axis=1, keepdims=True)
return log_tilde_z
def predict(self, input=None):
"""Most probable class index"""
return np.argmax(np.exp(self.log_forward(input)), axis=1)
def update(self, input=None, output=None):
"""Stochastic Gradient Descent update"""
# Probabilities of each class
class_probabilities = np.exp(self.log_forward(input))
batch_size, num_classes = class_probabilities.shape
# Error derivative at softmax layer
I = index2onehot(output, num_classes)
error = (class_probabilities - I) / batch_size
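        # (softmax probabilities minus the one-hot targets is the derivative
        # of the average cross-entropy loss with respect to the logits)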
# Weight gradient
gradient_weight = np.zeros(self.weight.shape)
for l in np.arange(batch_size):
gradient_weight += np.outer(error[l, :], input[l, :])
# Bias gradient
gradient_bias = np.sum(error, axis=0, keepdims=True)
# SGD update
self.weight = self.weight - self.learning_rate * gradient_weight
self.bias = self.bias - self.learning_rate * gradient_bias
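
# A minimal usage sketch (shapes and config values are hypothetical):
#
#     model = NumpyLogLinear(input_size=784, num_classes=10, learning_rate=0.05)
#     log_p = model.log_forward(batch_x)           # (batch, num_classes) log-probabilities
#     y_hat = model.predict(batch_x)               # most probable class per example
#     model.update(input=batch_x, output=batch_y)  # one SGD step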
| mit | -5,316,629,383,483,192,000 | 30.614035 | 72 | 0.622642 | false |
PierreFaniel/openerp-7.0 | stock_landed_costs/__init__.py | 2 | 1191 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2014 Elico Corp (<http://www.elico-corp.com>)
# Alex Duan <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import product
from . import stock
from . import wizard
from . import product_price_history
from . import account_anglo_saxon_pos
from . import purchase
| agpl-3.0 | 2,126,731,422,349,904,400 | 43.111111 | 78 | 0.628044 | false |
amjad-twalo/icsisumm | icsisumm-primary-sys34_v1/nltk/nltk-0.9.2/nltk/corpus/reader/xmldocs.py | 9 | 1313 | # Natural Language Toolkit: XML Corpus Reader
#
# Copyright (C) 2001-2008 University of Pennsylvania
# Author: Steven Bird <[email protected]>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
"""
Corpus reader for corpora whose documents are xml files.
(note -- not named 'xml' to avoid conflicting w/ standard xml package)
"""
from api import CorpusReader
from util import *
from nltk.internals import deprecated
# Use the c version of ElementTree, which is faster, if possible:
try: from xml.etree import cElementTree as ElementTree
except ImportError: from nltk.etree import ElementTree
class XMLCorpusReader(CorpusReader):
"""
Corpus reader for corpora whose documents are xml files.
"""
def xml(self, files=None):
return concat([ElementTree.parse(filename).getroot()
for filename in self.abspaths(files)])
def raw(self, files=None):
return concat([open(filename).read()
for filename in self.abspaths(files)])
#{ Deprecated since 0.8
@deprecated("Use .raw() or .xml() instead.")
def read(self, items=None, format='xml'):
if format == 'raw': return self.raw(items)
if format == 'xml': return self.xml(items)
raise ValueError('bad format %r' % format)
#}
| gpl-3.0 | -7,514,673,354,922,926,000 | 31.825 | 70 | 0.676314 | false |
bigzz/ZenKernel_Shamu | Documentation/target/tcm_mod_builder.py | 2358 | 40707 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: [email protected]
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
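# Scan include/target/target_core_fabric.h and collect every function
# pointer member of struct target_core_fabric_ops into the global
# fabric_ops list for later stub generation.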
def tcm_mod_scan_fabric_ops(tcm_dir):
	fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
	print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
	process_fo = 0
	p = open(fabric_ops_api, 'r')
	line = p.readline()
	while line:
		if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
			line = p.readline()
			continue
		if process_fo == 0:
			process_fo = 1
		line = p.readline()
		# Search for function pointer
		if not re.search('\(\*', line):
			continue
		fabric_ops.append(line.rstrip())
	p.close()
	return
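# Emit <fabric_mod_name>_fabric.c and <fabric_mod_name>_fabric.h containing a
# stub implementation for each callback collected by tcm_mod_scan_fabric_ops().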
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
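# Write the new fabric module's kbuild Makefile.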
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
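# Write the new fabric module's Kconfig entry.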
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
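# Hook the new module into drivers/target/Makefile.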
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
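# Source the new module's Kconfig from drivers/target/Kconfig.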
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
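# Generate the whole skeleton; an illustrative invocation of this script:
#   ./tcm_mod_builder.py -m tcm_nab5000 -p FC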
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
	tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
| gpl-2.0 | -9,097,885,983,816,448,000 | 37.842557 | 162 | 0.572678 | false |
Brunux/shityjobs | shityjobs/users/migrations/0001_initial.py | 1 | 2931 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-10-21 00:17
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
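# Initial schema for the project's custom ``users.User`` model; the field
# list mirrors django.contrib.auth's AbstractUser.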
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=30, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
| mit | -8,972,983,870,719,763,000 | 62.717391 | 329 | 0.644831 | false |
starrybeam/samba | source4/scripting/bin/gen_hresult.py | 19 | 9170 | #!/usr/bin/env python
#
# Unix SMB/CIFS implementation.
#
# HRESULT Error definitions
#
# Copyright (C) Noel Power <[email protected]> 2014
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys, os.path, io, string
# parsed error data
Errors = []
# error data model
class ErrorDef:
def __init__(self):
self.err_code = ""
self.err_define = None
self.err_string = ""
self.isWinError = False
self.linenum = ""
def escapeString(s):
    output = s.replace('"', '\\"')
    output = output.replace("\\<", "\\\\<")
    output = output.replace('\t', "")
    return output
def parseErrorDescriptions(input_file, isWinError):
    # read in the data
    fileContents = open(input_file, "r")
    count = 0
for line in fileContents:
content = line.strip().split(None,1)
# start new error definition ?
if line.startswith("0x"):
newError = ErrorDef()
newError.err_code = content[0]
# escape the usual suspects
if len(content) > 1:
newError.err_string = escapeString(content[1])
newError.linenum = count
newError.isWinError = isWinError
Errors.append(newError)
else:
if len(Errors) == 0:
print "Error parsing file as line %d"%count
sys.exit()
err = Errors[-1]
if err.err_define == None:
err.err_define = "HRES_" + content[0]
else:
if len(content) > 0:
desc = escapeString(line.strip())
if len(desc):
if err.err_string == "":
err.err_string = desc
else:
err.err_string = err.err_string + " " + desc
count = count + 1
fileContents.close()
print "parsed %d lines generated %d error definitions"%(count,len(Errors))
def write_license(out_file):
out_file.write("/*\n")
out_file.write(" * Unix SMB/CIFS implementation.\n")
out_file.write(" *\n")
out_file.write(" * HRESULT Error definitions\n")
out_file.write(" *\n")
out_file.write(" * Copyright (C) Noel Power <[email protected]> 2014\n")
out_file.write(" *\n")
out_file.write(" * This program is free software; you can redistribute it and/or modify\n")
out_file.write(" * it under the terms of the GNU General Public License as published by\n")
out_file.write(" * the Free Software Foundation; either version 3 of the License, or\n")
out_file.write(" * (at your option) any later version.\n")
out_file.write(" *\n")
out_file.write(" * This program is distributed in the hope that it will be useful,\n")
out_file.write(" * but WITHOUT ANY WARRANTY; without even the implied warranty of\n")
out_file.write(" * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n")
out_file.write(" * GNU General Public License for more details.\n")
out_file.write(" *\n")
out_file.write(" * You should have received a copy of the GNU General Public License\n")
out_file.write(" * along with this program. If not, see <http://www.gnu.org/licenses/>.\n")
out_file.write(" */\n")
out_file.write("\n")
def generateHeaderFile(out_file):
write_license(out_file)
out_file.write("#ifndef _HRESULT_H_\n")
out_file.write("#define _HRESULT_H_\n\n")
macro_magic = "#if defined(HAVE_IMMEDIATE_STRUCTURES)\n"
macro_magic += "typedef struct {uint32_t h;} HRESULT;\n"
macro_magic += "#define HRES_ERROR(x) ((HRESULT) { x })\n"
macro_magic += "#define HRES_ERROR_V(x) ((x).h)\n"
macro_magic += "#else\n"
macro_magic += "typedef uint32_t HRESULT;\n"
macro_magic += "#define HRES_ERROR(x) (x)\n"
macro_magic += "#define HRES_ERROR_V(x) (x)\n"
macro_magic += "#endif\n"
macro_magic += "\n"
macro_magic += "#define HRES_IS_OK(x) (HRES_ERROR_V(x) == 0)\n"
macro_magic += "#define HRES_IS_EQUAL(x,y) (HRES_ERROR_V(x) == HRES_ERROR_V(y))\n"
out_file.write(macro_magic)
out_file.write("\n\n")
out_file.write("/*\n")
out_file.write(" * The following error codes are autogenerated from [MS-ERREF]\n")
out_file.write(" * see http://msdn.microsoft.com/en-us/library/cc704587.aspx\n")
out_file.write(" */\n")
out_file.write("\n")
for err in Errors:
line = "#define {0:49} HRES_ERROR({1})\n".format(err.err_define ,err.err_code)
out_file.write(line)
out_file.write("\nconst char *hresult_errstr_const(HRESULT err_code);\n")
out_file.write("\nconst char *hresult_errstr(HRESULT err_code);\n")
out_file.write("\n#define FACILITY_WIN32 0x0007\n")
out_file.write("#define WIN32_FROM_HRESULT(x) (HRES_ERROR_V(x) == 0 ? HRES_ERROR_V(x) : ~((FACILITY_WIN32 << 16) | 0x80000000) & HRES_ERROR_V(x))\n")
out_file.write("#define HRESULT_IS_LIKELY_WERR(x) ((HRES_ERROR_V(x) & 0xFFFF0000) == 0x80070000)\n")
out_file.write("\n\n\n#endif /*_HRESULT_H_*/")
def generateSourceFile(out_file):
write_license(out_file)
out_file.write("#include \"includes.h\"\n")
out_file.write("#include \"hresult.h\"\n")
out_file.write("/*\n")
out_file.write(" * The following error codes and descriptions are autogenerated from [MS-ERREF]\n")
out_file.write(" * see http://msdn.microsoft.com/en-us/library/cc704587.aspx\n")
out_file.write(" */\n")
out_file.write("\n")
out_file.write("static const struct {\n")
out_file.write(" HRESULT error_code;\n")
out_file.write(" const char *error_str;\n")
out_file.write(" const char *error_message;\n")
out_file.write("} hresult_errs[] = {\n")
for err in Errors:
out_file.write(" {\n")
if err.isWinError:
out_file.write(" HRESULT_FROM_WIN32(%s),\n"%err.err_define)
out_file.write(" \"HRESULT_FROM_WIN32(%s)\",\n"%err.err_define)
else:
out_file.write(" %s,\n"%err.err_define)
out_file.write(" \"%s\",\n"%err.err_define)
out_file.write(" \"%s\"\n"%err.err_string)
out_file.write(" },\n")
out_file.write("};\n")
out_file.write("\n")
out_file.write("const char *hresult_errstr_const(HRESULT err_code)\n")
out_file.write("{\n");
out_file.write(" const char *result = NULL;\n")
out_file.write(" int i;\n")
out_file.write(" for (i = 0; i < ARRAY_SIZE(hresult_errs); ++i) {\n")
out_file.write(" if (HRES_IS_EQUAL(err_code, hresult_errs[i].error_code)) {\n")
out_file.write(" result = hresult_errs[i].error_message;\n")
out_file.write(" break;\n")
out_file.write(" }\n")
out_file.write(" }\n")
out_file.write(" /* convert & check win32 error space? */\n")
out_file.write(" if (result == NULL && HRESULT_IS_LIKELY_WERR(err_code)) {\n")
out_file.write(" WERROR wErr = W_ERROR(WIN32_FROM_HRESULT(err_code));\n")
out_file.write(" result = get_friendly_werror_msg(wErr);\n")
out_file.write(" }\n")
out_file.write(" return result;\n")
out_file.write("};\n")
out_file.write("\n")
out_file.write("const char *hresult_errstr(HRESULT err_code)\n")
out_file.write("{\n");
out_file.write(" static char msg[20];\n")
out_file.write(" int i;\n")
out_file.write("\n")
out_file.write(" for (i = 0; i < ARRAY_SIZE(hresult_errs); i++) {\n")
out_file.write(" if (HRES_IS_EQUAL(err_code, hresult_errs[i].error_code)) {\n")
out_file.write(" return hresult_errs[i].error_str;\n")
out_file.write(" }\n")
out_file.write(" }\n")
out_file.write(" snprintf(msg, sizeof(msg), \"HRES code 0x%08x\", HRES_ERROR_V(err_code));\n")
out_file.write(" return msg;\n")
out_file.write("};\n")
# Very simple script to generate files hresult.c & hresult.h
# The script simply takes a text file as input; the format of the input file
# is very simple and is just the content of an HTML table (such as that found
# in http://msdn.microsoft.com/en-us/library/cc704587.aspx) copied and
# pasted into a text file.
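# For illustration, the parser expects rows shaped roughly like:
#   0x80004005
#   E_FAIL
#   Unspecified failure.
# i.e. a hex code line, then the symbolic name, then the description text.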
def main():
    input_file1 = None
    filename = "hresult"
    headerfile_name = filename + ".h"
    sourcefile_name = filename + ".c"
    if len(sys.argv) > 1:
        input_file1 = sys.argv[1]
    else:
        print "usage: %s winerrorfile" % (sys.argv[0])
        sys.exit()
parseErrorDescriptions(input_file1, False)
out_file = open(headerfile_name,"w")
generateHeaderFile(out_file)
out_file.close()
out_file = open(sourcefile_name,"w")
generateSourceFile(out_file)
if __name__ == '__main__':
main()
| gpl-3.0 | -7,217,370,936,568,156 | 39.755556 | 153 | 0.609924 | false |
pkoutsias/SickRage | lib/guessit/language.py | 3 | 11578 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2013 Nicolas Wack <[email protected]>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function, unicode_literals
import re
import logging
from guessit import u
from guessit.textutils import find_words
from babelfish import Language, Country
import babelfish
from guessit.guess import Guess
__all__ = ['Language', 'UNDETERMINED',
'search_language', 'guess_language']
log = logging.getLogger(__name__)
UNDETERMINED = babelfish.Language('und')
SYN = {('und', None): ['unknown', 'inconnu', 'unk', 'un'],
('ell', None): ['gr', 'greek'],
('spa', None): ['esp', 'español'],
('fra', None): ['français', 'vf', 'vff', 'vfi'],
('swe', None): ['se'],
('por', 'BR'): ['po', 'pb', 'pob', 'br', 'brazilian'],
('cat', None): ['català'],
('ces', None): ['cz'],
('ukr', None): ['ua'],
('zho', None): ['cn'],
('jpn', None): ['jp'],
('hrv', None): ['scr'],
('mul', None): ['multi', 'dl'], # http://scenelingo.wordpress.com/2009/03/24/what-does-dl-mean/
}
class GuessitConverter(babelfish.LanguageReverseConverter):
_with_country_regexp = re.compile('(.*)\((.*)\)')
_with_country_regexp2 = re.compile('(.*)-(.*)')
def __init__(self):
self.guessit_exceptions = {}
for (alpha3, country), synlist in SYN.items():
for syn in synlist:
self.guessit_exceptions[syn.lower()] = (alpha3, country, None)
@property
def codes(self):
return (babelfish.language_converters['alpha3b'].codes |
babelfish.language_converters['alpha2'].codes |
babelfish.language_converters['name'].codes |
babelfish.language_converters['opensubtitles'].codes |
babelfish.country_converters['name'].codes |
frozenset(self.guessit_exceptions.keys()))
@staticmethod
def convert(alpha3, country=None, script=None):
return str(babelfish.Language(alpha3, country, script))
def reverse(self, name):
with_country = (GuessitConverter._with_country_regexp.match(name) or
GuessitConverter._with_country_regexp2.match(name))
name = u(name.lower())
if with_country:
lang = Language.fromguessit(with_country.group(1).strip())
lang.country = babelfish.Country.fromguessit(with_country.group(2).strip())
return lang.alpha3, lang.country.alpha2 if lang.country else None, lang.script or None
# exceptions come first, as they need to override a potential match
# with any of the other guessers
try:
return self.guessit_exceptions[name]
except KeyError:
pass
for conv in [babelfish.Language,
babelfish.Language.fromalpha3b,
babelfish.Language.fromalpha2,
babelfish.Language.fromname,
babelfish.Language.fromopensubtitles]:
try:
c = conv(name)
return c.alpha3, c.country, c.script
except (ValueError, babelfish.LanguageReverseError):
pass
raise babelfish.LanguageReverseError(name)
babelfish.language_converters['guessit'] = GuessitConverter()
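# Illustrative example: Language.fromguessit("pob") resolves to Brazilian
# Portuguese through the SYN table above (babelfish renders it as pt-BR).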
COUNTRIES_SYN = {'ES': ['españa'],
'GB': ['UK'],
'BR': ['brazilian', 'bra'],
# FIXME: this one is a bit of a stretch, not sure how to do
# it properly, though...
'MX': ['Latinoamérica', 'latin america']
}
class GuessitCountryConverter(babelfish.CountryReverseConverter):
def __init__(self):
self.guessit_exceptions = {}
for alpha2, synlist in COUNTRIES_SYN.items():
for syn in synlist:
self.guessit_exceptions[syn.lower()] = alpha2
@property
def codes(self):
return (babelfish.country_converters['name'].codes |
frozenset(babelfish.COUNTRIES.values()) |
frozenset(self.guessit_exceptions.keys()))
@staticmethod
def convert(alpha2):
if alpha2 == 'GB':
return 'UK'
return str(Country(alpha2))
def reverse(self, name):
# exceptions come first, as they need to override a potential match
# with any of the other guessers
try:
return self.guessit_exceptions[name.lower()]
except KeyError:
pass
try:
return babelfish.Country(name.upper()).alpha2
except ValueError:
pass
for conv in [babelfish.Country.fromname]:
try:
return conv(name).alpha2
except babelfish.CountryReverseError:
pass
raise babelfish.CountryReverseError(name)
babelfish.country_converters['guessit'] = GuessitCountryConverter()
# list of common words which could be interpreted as languages, but which
# are far too common to be able to say they represent a language in the
# middle of a string (where they most likely carry their common meaning)
LNG_COMMON_WORDS = frozenset([
# english words
'is', 'it', 'am', 'mad', 'men', 'man', 'run', 'sin', 'st', 'to',
'no', 'non', 'war', 'min', 'new', 'car', 'day', 'bad', 'bat', 'fan',
'fry', 'cop', 'zen', 'gay', 'fat', 'one', 'cherokee', 'got', 'an', 'as',
'cat', 'her', 'be', 'hat', 'sun', 'may', 'my', 'mr', 'rum', 'pi', 'bb',
'bt', 'tv', 'aw', 'by', 'md', 'mp', 'cd', 'lt', 'gt', 'in', 'ad', 'ice',
'ay', 'at', 'star', 'so',
# french words
'bas', 'de', 'le', 'son', 'ne', 'ca', 'ce', 'et', 'que',
'mal', 'est', 'vol', 'or', 'mon', 'se', 'je', 'tu', 'me',
    'ma', 'va', 'au',
# japanese words,
'wa', 'ga', 'ao',
# spanish words
'la', 'el', 'del', 'por', 'mar', 'al',
# other
'ind', 'arw', 'ts', 'ii', 'bin', 'chan', 'ss', 'san', 'oss', 'iii',
'vi', 'ben', 'da', 'lt', 'ch', 'sr', 'ps', 'cx',
# new from babelfish
'mkv', 'avi', 'dmd', 'the', 'dis', 'cut', 'stv', 'des', 'dia', 'and',
'cab', 'sub', 'mia', 'rim', 'las', 'une', 'par', 'srt', 'ano', 'toy',
'job', 'gag', 'reel', 'www', 'for', 'ayu', 'csi', 'ren', 'moi', 'sur',
'fer', 'fun', 'two', 'big', 'psy', 'air',
# movie title
'brazil',
# release groups
'bs', # Bosnian
'kz',
# countries
'gt', 'lt', 'im',
# part/pt
'pt'
])
LNG_COMMON_WORDS_STRICT = frozenset(['brazil'])
subtitle_prefixes = ['sub', 'subs', 'st', 'vost', 'subforced', 'fansub', 'hardsub']
subtitle_suffixes = ['subforced', 'fansub', 'hardsub', 'sub', 'subs']
lang_prefixes = ['true']
all_lang_prefixes_suffixes = subtitle_prefixes + subtitle_suffixes + lang_prefixes
def find_possible_languages(string, allowed_languages=None):
"""Find possible languages in the string
:return: list of tuple (property, Language, lang_word, word)
"""
    if allowed_languages:
        common_words = LNG_COMMON_WORDS_STRICT
    else:
        common_words = LNG_COMMON_WORDS
words = find_words(string)
valid_words = []
for word in words:
lang_word = word.lower()
key = 'language'
for prefix in subtitle_prefixes:
if lang_word.startswith(prefix):
lang_word = lang_word[len(prefix):]
key = 'subtitleLanguage'
for suffix in subtitle_suffixes:
if lang_word.endswith(suffix):
                lang_word = lang_word[:-len(suffix)]  # strip the matched suffix
key = 'subtitleLanguage'
for prefix in lang_prefixes:
if lang_word.startswith(prefix):
lang_word = lang_word[len(prefix):]
if lang_word not in common_words and word.lower() not in common_words:
try:
lang = Language.fromguessit(lang_word)
if allowed_languages:
if lang.name.lower() in allowed_languages or lang.alpha2.lower() in allowed_languages or lang.alpha3.lower() in allowed_languages:
valid_words.append((key, lang, lang_word, word))
# Keep language with alpha2 equivalent. Others are probably
# uncommon languages.
elif lang == 'mul' or hasattr(lang, 'alpha2'):
valid_words.append((key, lang, lang_word, word))
except babelfish.Error:
pass
return valid_words
def search_language(string, allowed_languages=None):
"""Looks for language patterns, and if found return the language object,
its group span and an associated confidence.
you can specify a list of allowed languages using the lang_filter argument,
as in lang_filter = [ 'fr', 'eng', 'spanish' ]
>>> search_language('movie [en].avi')['language']
<Language [en]>
>>> search_language('the zen fat cat and the gay mad men got a new fan', allowed_languages = ['en', 'fr', 'es'])
"""
if allowed_languages:
allowed_languages = set(Language.fromguessit(lang) for lang in allowed_languages)
confidence = 1.0 # for all of them
for prop, language, lang, word in find_possible_languages(string, allowed_languages):
pos = string.find(word)
end = pos + len(word)
# only allow those languages that have a 2-letter code, those that
# don't are too esoteric and probably false matches
# if language.lang not in lng3_to_lng2:
# continue
# confidence depends on alpha2, alpha3, english name, ...
if len(lang) == 2:
confidence = 0.8
elif len(lang) == 3:
confidence = 0.9
elif prop == 'subtitleLanguage':
confidence = 0.6 # Subtitle prefix found with language
else:
# Note: we could either be really confident that we found a
# language or assume that full language names are too
# common words and lower their confidence accordingly
confidence = 0.3 # going with the low-confidence route here
return Guess({prop: language}, confidence=confidence, input=string, span=(pos, end))
return None
def guess_language(text): # pragma: no cover
"""Guess the language in which a body of text is written.
This uses the external guess-language python module, and will fail and return
Language(Undetermined) if it is not installed.
"""
try:
from guess_language import guessLanguage
return Language.fromguessit(guessLanguage(text))
except ImportError:
log.error('Cannot detect the language of the given text body, missing dependency: guess-language')
log.error('Please install it from PyPI, by doing eg: pip install guess-language')
return UNDETERMINED
| gpl-3.0 | 1,698,536,484,428,574,000 | 35.278997 | 150 | 0.589908 | false |
azumimuo/family-xbmc-addon | script.module.livestreamer/lib/livestreamer/packages/flashmedia/types.py | 42 | 45802 | from .compat import OrderedDict, is_py2, str, bytes, integer_types, string_types
from .util import pack_bytes_into
from collections import namedtuple
from struct import Struct, error as struct_error
from inspect import getargspec
(SCRIPT_DATA_TYPE_NUMBER, SCRIPT_DATA_TYPE_BOOLEAN,
SCRIPT_DATA_TYPE_STRING, SCRIPT_DATA_TYPE_OBJECT,
SCRIPT_DATA_TYPE_RESERVED, SCRIPT_DATA_TYPE_NULL,
SCRIPT_DATA_TYPE_UNDEFINED, SCRIPT_DATA_TYPE_REFERENCE,
SCRIPT_DATA_TYPE_ECMAARRAY, SCRIPT_DATA_TYPE_OBJECTEND,
SCRIPT_DATA_TYPE_STRICTARRAY, SCRIPT_DATA_TYPE_DATE,
SCRIPT_DATA_TYPE_LONGSTRING) = range(13)
SCRIPT_DATA_TYPE_AMF3 = 0x11
(AMF3_TYPE_UNDEFINED, AMF3_TYPE_NULL, AMF3_TYPE_FALSE, AMF3_TYPE_TRUE,
AMF3_TYPE_INTEGER, AMF3_TYPE_DOUBLE, AMF3_TYPE_STRING, AMF3_TYPE_XML_DOC,
AMF3_TYPE_DATE, AMF3_TYPE_ARRAY, AMF3_TYPE_OBJECT, AMF3_TYPE_XML,
AMF3_TYPE_BYTE_ARRAY, AMF3_TYPE_VECTOR_INT, AMF3_TYPE_VECTOR_UINT,
AMF3_TYPE_VECTOR_DOUBLE, AMF3_TYPE_VECTOR_OBJECT, AMF3_TYPE_DICT) = range(0x12)
AMF3_EMPTY_STRING = 0x01
AMF3_DYNAMIC_OBJECT = 0x0b
AMF3_CLOSE_DYNAMIC_OBJECT = 0x01
AMF3_CLOSE_DYNAMIC_ARRAY = 0x01
AMF3_MIN_INTEGER = -268435456
AMF3_MAX_INTEGER = 268435455
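# Fixed-size types are thin wrappers around struct.Struct: calling the
# instance packs its arguments, and read() consumes exactly one value
# from a file-like object.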
class PrimitiveType(Struct):
def __call__(self, *args):
return self.pack(*args)
def read(self, fd):
data = fd.read(self.size)
if len(data) != self.size:
raise IOError("Unable to read required amount of data")
return self.unpack(data)[0]
class PrimitiveClassType(PrimitiveType):
def __init__(self, format, cls):
self.cls = cls
PrimitiveType.__init__(self, format)
def pack(self, val):
return PrimitiveType.pack(self, *val)
def pack_into(self, buf, offset, val):
return PrimitiveType.pack_into(self, buf, offset, *val)
def unpack(self, data):
vals = PrimitiveType.unpack(self, data)
rval = self.cls(*vals)
return (rval,)
def unpack_from(self, buf, offset):
vals = PrimitiveType.unpack_from(self, buf, offset)
rval = self.cls(*vals)
return (rval,)
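# Base for types whose packed size depends on the value itself; subclasses
# implement size/pack/pack_into/read/unpack_from as classmethods.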
class DynamicType(object):
def __new__(cls, *args, **kwargs):
return cls.pack(*args, **kwargs)
@classmethod
def size(cls, val):
raise NotImplementedError
@classmethod
def pack(cls, val):
raise NotImplementedError
@classmethod
def pack_into(cls, buf, offset, val):
raise NotImplementedError
@classmethod
def read(cls, fd):
raise NotImplementedError
@classmethod
def unpack_from(cls, buf, offset):
raise NotImplementedError
@classmethod
def unpack(cls, buf):
return cls.unpack_from(buf, 0)
class TwosComplement(PrimitiveType):
def __init__(self, primitive):
self.primitive = primitive
bits = self.primitive.size * 8
self.maxval = 1 << bits
self.midval = self.maxval >> 1
self.upper = self.midval - 1
self.lower = -self.midval
    @property
    def size(self):
        # width of the wrapped unsigned primitive (3 bytes for the U24 types)
        return self.primitive.size
def pack(self, val):
if val < self.lower or val > self.upper:
msg = "{0} format requires {1} <= number <= {2}".format(self.primitive.format,
self.lower, self.upper)
raise struct_error(msg)
if val < 0:
val = val + self.maxval
return self.primitive.pack(val)
def pack_into(self, buf, offset, val):
if val < self.lower or val > self.upper:
msg = "{0} format requires {1} <= number <= {2}".format(self.primitive.format,
self.lower, self.upper)
raise struct_error(msg)
if val < 0:
val = val + self.maxval
return self.primitive.pack_into(buf, offset, val)
def unpack(self, data):
val = self.primitive.unpack(data)[0]
if val & self.midval:
val = val - self.maxval
return (val,)
def unpack_from(self, buf, offset):
val = self.primitive.unpack_from(buf, offset)[0]
if val & self.midval:
val = val - self.maxval
return (val,)
class HighLowCombo(PrimitiveType):
def __init__(self, format, highbits, reverse=True):
PrimitiveType.__init__(self, format)
self.highbits = highbits
self.lowmask = (1 << highbits) - 1
self.reverse = reverse
self.lower = 0
self.upper = (1 << (self.size * 8)) - 1
def pack(self, val):
if val < self.lower or val > self.upper:
msg = "{0} format requires {1} <= number <= {2}".format(self.format,
self.lower, self.upper)
raise struct_error(msg)
if self.reverse:
high = val >> self.highbits
low = val & self.lowmask
else:
high = val & self.lowmask
low = val >> self.highbits
return PrimitiveType.pack(self, high, low)
def pack_into(self, buf, offset, val):
if val < self.lower or val > self.upper:
msg = "{0} format requires {1} <= number <= {2}".format(self.format,
self.lower, self.upper)
raise struct_error(msg)
if self.reverse:
high = val >> self.highbits
low = val & self.lowmask
else:
high = val & self.lowmask
low = val >> self.highbits
return PrimitiveType.pack_into(self, buf, offset, high, low)
def unpack(self, data):
high, low = PrimitiveType.unpack(self, data)
if self.reverse:
ret = high << self.highbits
ret |= low
else:
ret = high
ret |= low << self.highbits
return (ret,)
def unpack_from(self, buf, offset):
high, low = PrimitiveType.unpack_from(self, buf, offset)
if self.reverse:
ret = high << self.highbits
ret |= low
else:
ret = high
ret |= low << self.highbits
return (ret,)
class FixedPoint(PrimitiveType):
def __init__(self, format, bits):
self.divider = float(1 << bits)
PrimitiveType.__init__(self, format)
def pack(self, val):
val *= self.divider
return PrimitiveType.pack(self, int(val))
def pack_into(self, buf, offset, val):
val *= self.divider
return PrimitiveType.pack_into(self, buf, offset, int(val))
def unpack(self, data):
val = PrimitiveType.unpack(self, data)[0]
val /= self.divider
return (val,)
def unpack_from(self, buf, offset):
val = PrimitiveType.unpack_from(self, buf, offset)[0]
val /= self.divider
return (val,)
class PaddedBytes(PrimitiveType):
def __init__(self, size, padding):
self.padded_size = size
self.padding = bytes(padding, "ascii")
@property
def size(self):
return self.padded_size
def pack(self, val):
rval = bytes(val[:self.size], "ascii")
if len(rval) < self.size:
paddinglen = self.size - len(rval)
rval += self.padding * paddinglen
return rval
def pack_into(self, buf, offset, val):
rval = bytes(val[:self.size], "ascii")
offset = pack_bytes_into(buf, offset, rval)
if len(rval) < self.size:
paddinglen = self.size - len(rval)
offset = pack_bytes_into(buf, offset, self.padding * paddinglen)
def unpack(self, data):
return (str(data.rstrip(self.padding), "ascii"),)
def unpack_from(self, buf, offset):
data = buf[offset:offset + self.padded_size]
return (str(data.rstrip(self.padding), "ascii"),)
""" 8-bit integer """
U8 = PrimitiveType("B")
S8 = PrimitiveType("b")
""" 16-bit integer """
U16BE = PrimitiveType(">H")
S16BE = PrimitiveType(">h")
U16LE = PrimitiveType("<H")
S16LE = PrimitiveType("<h")
""" 24-bit integer """
U24BE = HighLowCombo(">HB", 8, True)
S24BE = TwosComplement(U24BE)
U24LE = HighLowCombo("<HB", 16, False)
S24LE = TwosComplement(U24LE)
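# e.g. U24BE(0x123456) == b"\x12\x34\x56" and S24BE(-1) == b"\xff\xff\xff"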
""" 32-bit integer """
U32BE = PrimitiveType(">I")
S32BE = PrimitiveType(">i")
U32LE = PrimitiveType("<I")
S32LE = PrimitiveType("<i")
""" 64-bit integer """
U64BE = PrimitiveType(">Q")
U64LE = PrimitiveType("<Q")
""" Fixed point numbers """
U8_8BE = FixedPoint(">H", 8)
S8_8BE = FixedPoint(">h", 8)
U16_16BE = FixedPoint("<I", 16)
S16_16BE = FixedPoint("<i", 16)
U8_8LE = FixedPoint("<H", 8)
S8_8LE = FixedPoint("<h", 8)
U16_16LE = FixedPoint("<I", 16)
S16_16LE = FixedPoint("<i", 16)
DoubleLE = PrimitiveType("<d")
DoubleBE = PrimitiveType(">d")
""" Various types """
FourCC = PaddedBytes(4, " ")
""" Script data types """
ScriptDataNumber = DoubleBE
ScriptDataBoolean = PrimitiveType("?")
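# Unsigned integer whose width depends on the container's version field:
# 64-bit when version == 1, otherwise 32-bit.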
class U3264(DynamicType):
@classmethod
def size(cls, val, version):
if version == 1:
return U64BE.size
else:
return U32BE.size
@classmethod
def pack(cls, val, version):
if version == 1:
return U64BE(val)
else:
return U32BE(val)
@classmethod
def pack_into(cls, buf, offset, val, version):
if version == 1:
prim = U64BE
else:
prim = U32BE
prim.pack_into(buf, offset, val)
return offset + prim.size
@classmethod
def read(cls, fd, version):
if version == 1:
return U64BE.read(fd)
else:
return U32BE.read(fd)
@classmethod
def unpack_from(cls, buf, offset, version):
if version == 1:
prim = U64BE
else:
prim = U32BE
rval = prim.unpack_from(buf, offset)
offset += prim.size
return (rval, offset)
class String(DynamicType):
@classmethod
def size(cls, *args, **kwargs):
return len(cls.pack(*args, **kwargs))
@classmethod
def pack(cls, val, encoding="utf8", errors="ignore"):
rval = val.encode(encoding, errors)
return rval
@classmethod
def pack_into(cls, buf, offset, val,
encoding="utf8", errors="ignore"):
return pack_bytes_into(buf, offset,
val.encode(encoding, errors))
class CString(String):
EndMarker = b"\x00"
@classmethod
def pack(cls, *args, **kwargs):
rval = String.pack(*args, **kwargs)
rval += CString.EndMarker
return rval
@classmethod
def pack_into(cls, buf, offset, *args, **kwargs):
offset = String.pack_into(buf, offset, *args, **kwargs)
U8.pack_into(buf, offset, 0)
return offset + 1
@classmethod
def read(cls, fd, encoding="utf8", errors="ignore"):
rval = b""
while True:
ch = fd.read(1)
if len(ch) == 0 or ch == CString.EndMarker:
break
rval += ch
return rval.decode(encoding, errors)
@classmethod
def unpack_from(cls, buf, offset, encoding="utf8", errors="ignore"):
end = buf[offset:].find(b"\x00")
rval = buf[offset:offset + end].decode(encoding, errors)
offset += end + 1
return (rval, offset)
class ScriptDataType(object):
__identifier__ = 0
class ScriptDataString(String):
__size_primitive__ = U16BE
@classmethod
def pack(cls, val, *args, **kwargs):
rval = String.pack(val, *args, **kwargs)
size = cls.__size_primitive__(len(rval))
return size + rval
@classmethod
def pack_into(cls, buf, offset, val, *args, **kwargs):
noffset = String.pack_into(buf, offset + cls.__size_primitive__.size,
val, *args, **kwargs)
cls.__size_primitive__.pack_into(buf, offset,
(noffset - offset) - cls.__size_primitive__.size)
return noffset
@classmethod
def read(cls, fd, encoding="utf8", errors="ignore"):
size = cls.__size_primitive__.read(fd)
data = fd.read(size)
return data.decode(encoding, errors)
@classmethod
def unpack_from(cls, buf, offset, encoding="utf8", errors="ignore"):
size = cls.__size_primitive__.unpack_from(buf, offset)[0]
offset += cls.__size_primitive__.size
data = buf[offset:offset + size].decode(encoding, errors)
offset += size
return (data, offset)
class ScriptDataLongString(ScriptDataString):
__size_primitive__ = U32BE
class ScriptDataObjectEnd(Exception):
pass
class ScriptDataObject(OrderedDict, ScriptDataType):
__identifier__ = SCRIPT_DATA_TYPE_OBJECT
@classmethod
def size(cls, val):
size = 3
for key, value in val.items():
size += ScriptDataString.size(key)
size += ScriptDataValue.size(value)
return size
@classmethod
def pack(cls, val):
rval = b""
for key, value in val.items():
rval += ScriptDataString(key)
rval += ScriptDataValue.pack(value)
# Zero length key + object end identifier ends object
rval += ScriptDataString("")
rval += U8(SCRIPT_DATA_TYPE_OBJECTEND)
return rval
@classmethod
def pack_into(cls, buf, offset, val):
for key, value in val.items():
offset = ScriptDataString.pack_into(buf, offset, key)
offset = ScriptDataValue.pack_into(buf, offset, value)
# Zero length key + object end identifier ends object
offset = ScriptDataString.pack_into(buf, offset, "")
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_OBJECTEND)
return offset + U8.size
@classmethod
def read(cls, fd):
rval = cls()
while True:
try:
key = ScriptDataString.read(fd)
value = ScriptDataValue.read(fd)
except ScriptDataObjectEnd:
break
if len(key) == 0:
break
rval[key] = value
return rval
@classmethod
def unpack_from(cls, buf, offset):
rval = cls()
while True:
try:
key, offset = ScriptDataString.unpack_from(buf, offset)
value, offset = ScriptDataValue.unpack_from(buf, offset)
except ScriptDataObjectEnd:
offset += 1
break
if len(key) == 0:
break
rval[key] = value
return (rval, offset)
class ScriptDataECMAArray(ScriptDataObject):
__identifier__ = SCRIPT_DATA_TYPE_ECMAARRAY
@classmethod
def size(cls, val):
return 4 + ScriptDataObject.size(val)
@classmethod
def pack(cls, val):
rval = U32BE(len(val))
rval += ScriptDataObject.pack(val)
return rval
@classmethod
def pack_into(cls, buf, offset, val):
U32BE.pack_into(buf, offset, len(val))
return ScriptDataObject.pack_into(buf, offset + U32BE.size,
val)
@classmethod
def read(cls, fd):
U32BE.read(fd) # Length
val = ScriptDataObject.read(fd)
return cls(val)
@classmethod
def unpack_from(cls, buf, offset):
U32BE.unpack_from(buf, offset) # Length
offset += U32BE.size
val, offset = ScriptDataObject.unpack_from(buf, offset)
return (cls(val), offset)
class ScriptDataStrictArray(DynamicType):
@classmethod
def size(cls, val):
size = 4
for sdval in val:
size += ScriptDataValue.size(sdval)
return size
@classmethod
def pack(cls, val):
rval = U32BE(len(val))
for sdval in val:
rval += ScriptDataValue.pack(sdval)
return rval
@classmethod
def pack_into(cls, buf, offset, val):
U32BE.pack_into(buf, offset, len(val))
offset += U32BE.size
for sdval in val:
offset = ScriptDataValue.pack_into(buf, offset, sdval)
return offset
@classmethod
def read(cls, fd):
length = U32BE.read(fd)
rval = []
for i in range(length):
val = ScriptDataValue.read(fd)
rval.append(val)
return rval
@classmethod
def unpack_from(cls, buf, offset):
length = U32BE.unpack_from(buf, offset)[0]
offset += U32BE.size
rval = []
for i in range(length):
val, offset = ScriptDataValue.unpack_from(buf, offset)
rval.append(val)
return (rval, offset)
ScriptDataDate = namedtuple("ScriptDataDate", ["timestamp", "offset"])
ScriptDataDateStruct = PrimitiveClassType(">dh", ScriptDataDate)
ScriptDataDate.__identifier__ = SCRIPT_DATA_TYPE_DATE
ScriptDataDate.__packer__ = ScriptDataDateStruct
ScriptDataReference = namedtuple("ScriptDataReference", ["reference"])
ScriptDataReferenceStruct = PrimitiveClassType(">H", ScriptDataReference)
ScriptDataReference.__identifier__ = SCRIPT_DATA_TYPE_REFERENCE
ScriptDataReference.__packer__ = ScriptDataReferenceStruct
class ScriptDataValue(DynamicType, ScriptDataType):
# key: identifier, value: unpacker class
PrimitiveReaders = {
SCRIPT_DATA_TYPE_NUMBER: ScriptDataNumber,
SCRIPT_DATA_TYPE_BOOLEAN: ScriptDataBoolean,
SCRIPT_DATA_TYPE_REFERENCE: ScriptDataReferenceStruct,
SCRIPT_DATA_TYPE_DATE: ScriptDataDateStruct,
}
DynamicReaders = {
SCRIPT_DATA_TYPE_STRING: ScriptDataString,
SCRIPT_DATA_TYPE_LONGSTRING: ScriptDataLongString,
SCRIPT_DATA_TYPE_OBJECT: ScriptDataObject,
SCRIPT_DATA_TYPE_ECMAARRAY: ScriptDataECMAArray,
SCRIPT_DATA_TYPE_STRICTARRAY: ScriptDataStrictArray,
}
Readers = PrimitiveReaders.copy()
Readers.update(DynamicReaders)
@classmethod
def size(cls, val):
size = 1
if isinstance(val, bool):
size += ScriptDataBoolean.size
elif isinstance(val, (int, float)):
size += ScriptDataNumber.size
elif isinstance(val, list):
size += ScriptDataStrictArray.size(val)
elif isinstance(val, string_types):
if len(val) > 0xFFFF:
size += ScriptDataLongString.size(val)
else:
size += ScriptDataString.size(val)
elif isinstance(val, ScriptDataType):
cls = type(val)
size += cls.size(val)
elif type(val) in (ScriptDataDate, ScriptDataReference):
cls = type(val)
packer = cls.__packer__
size += packer.size
elif isinstance(val, AMF3ObjectBase):
size += U8.size
size += AMF3Value.size(val)
return size
@classmethod
def pack(cls, val):
rval = b""
if isinstance(val, bool):
rval += U8(SCRIPT_DATA_TYPE_BOOLEAN)
rval += ScriptDataBoolean(val)
elif isinstance(val, (int, float)):
rval += U8(SCRIPT_DATA_TYPE_NUMBER)
rval += ScriptDataNumber(val)
elif isinstance(val, list):
rval += U8(SCRIPT_DATA_TYPE_STRICTARRAY)
rval += ScriptDataStrictArray(val)
elif isinstance(val, string_types):
if len(val) > 0xFFFF:
rval += U8(SCRIPT_DATA_TYPE_LONGSTRING)
rval += ScriptDataLongString(val)
else:
rval += U8(SCRIPT_DATA_TYPE_STRING)
rval += ScriptDataString(val)
elif val is None:
rval += U8(SCRIPT_DATA_TYPE_NULL)
elif isinstance(val, ScriptDataType):
cls = type(val)
rval += U8(cls.__identifier__)
rval += cls.pack(val)
elif type(val) in (ScriptDataDate, ScriptDataReference):
cls = type(val)
packer = cls.__packer__
rval += U8(cls.__identifier__)
rval += packer.pack(val)
elif isinstance(val, AMF3ObjectBase):
rval += U8(SCRIPT_DATA_TYPE_AMF3)
rval += AMF3Value.pack(val)
else:
raise ValueError("Unable to pack value of type {0}".format(type(val)))
return rval
@classmethod
def pack_into(cls, buf, offset, val):
if isinstance(val, bool):
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_BOOLEAN)
offset += U8.size
ScriptDataBoolean.pack_into(buf, offset, val)
offset += ScriptDataBoolean.size
elif isinstance(val, (int, float)):
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_NUMBER)
offset += U8.size
ScriptDataNumber.pack_into(buf, offset, val)
offset += ScriptDataNumber.size
elif isinstance(val, list):
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_STRICTARRAY)
offset += U8.size
offset = ScriptDataStrictArray.pack_into(buf, offset, val)
elif isinstance(val, string_types):
if len(val) > 0xFFFF:
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_LONGSTRING)
offset += U8.size
offset = ScriptDataLongString.pack_into(buf, offset, val)
else:
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_STRING)
offset += U8.size
offset = ScriptDataString.pack_into(buf, offset, val)
elif val is None:
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_NULL)
elif isinstance(val, ScriptDataType):
cls = type(val)
U8.pack_into(buf, offset, cls.__identifier__)
offset += U8.size
offset = cls.pack_into(buf, offset, val)
elif type(val) in (ScriptDataDate, ScriptDataReference):
cls = type(val)
packer = cls.__packer__
U8.pack_into(buf, offset, cls.__identifier__)
offset += U8.size
packer.pack_into(buf, offset, val)
offset += packer.size
else:
raise ValueError("Unable to pack value of type {0}".format(type(val)))
return offset
@classmethod
def read(cls, fd, marker=None):
if marker is None:
type_ = U8.read(fd)
else:
type_ = marker
if type_ == SCRIPT_DATA_TYPE_AMF3:
return AMF3Value.read(fd)
elif type_ in ScriptDataValue.Readers:
return ScriptDataValue.Readers[type_].read(fd)
elif type_ == SCRIPT_DATA_TYPE_OBJECTEND:
raise ScriptDataObjectEnd
elif (type_ == SCRIPT_DATA_TYPE_NULL or
type_ == SCRIPT_DATA_TYPE_UNDEFINED):
return None
else:
raise IOError("Unhandled script data type: {0}".format(type_))
@classmethod
def unpack_from(cls, buf, offset):
type_ = U8.unpack_from(buf, offset)[0]
offset += U8.size
if type_ in ScriptDataValue.DynamicReaders:
return ScriptDataValue.Readers[type_].unpack_from(buf, offset)
elif type_ in ScriptDataValue.PrimitiveReaders:
reader = ScriptDataValue.PrimitiveReaders[type_]
rval = reader.unpack_from(buf, offset)[0]
offset += reader.size
return (rval, offset)
elif type_ == SCRIPT_DATA_TYPE_OBJECTEND:
raise ScriptDataObjectEnd
elif (type_ == SCRIPT_DATA_TYPE_NULL or
type_ == SCRIPT_DATA_TYPE_UNDEFINED):
return (None, offset)
else:
raise IOError("Unhandled script data type: {0}".format(hex(type_)))
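# Illustrative sketch (not part of the original module): every AMF0 value is
# a one-byte type marker followed by its payload, so pack/unpack_from
# round-trip as below (the marker constants are defined earlier in this file).
def _script_data_value_example():
    data = ScriptDataValue.pack(3.5)  # number marker + 8-byte big-endian double
    value, offset = ScriptDataValue.unpack_from(data, 0)
    assert value == 3.5 and offset == len(data)
    return value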
class AMF0Value(ScriptDataValue):
pass
class AMF0String(ScriptDataString):
pass
AMF0Number = ScriptDataNumber
AMF3Double = ScriptDataNumber
class AMF3Type(ScriptDataType):
pass
class AMF3Integer(DynamicType, AMF3Type):
__identifier__ = AMF3_TYPE_INTEGER
@classmethod
def size(cls, val):
val &= 0x1fffffff
if val < 0x80:
return 1
elif val < 0x4000:
return 2
elif val < 0x200000:
return 3
elif val < 0x40000000:
return 4
@classmethod
def pack(cls, val):
size = cls.size(val)
buf = bytearray(size)
offset = cls.pack_into(buf, 0, val)
return bytes(buf[:offset])
@classmethod
def pack_into(cls, buf, offset, val):
val &= 0x1fffffff
if val < 0x80:
buf[offset] = val
offset += 1
elif val < 0x4000:
buf[offset] = (val >> 7 & 0x7f) | 0x80
buf[offset+1] = val & 0x7f
offset += 2
elif val < 0x200000:
buf[offset] = (val >> 14 & 0x7f) | 0x80
buf[offset+1] = (val >> 7 & 0x7f) | 0x80
buf[offset+2] = val & 0x7f
offset += 3
elif val < 0x40000000:
buf[offset] = (val >> 22 & 0x7f) | 0x80
buf[offset+1] = (val >> 15 & 0x7f) | 0x80
buf[offset+2] = (val >> 8 & 0x7f) | 0x80
buf[offset+3] = val & 0xff
offset += 4
return offset
@classmethod
def read(cls, fd):
rval, byte_count = 0, 0
byte = U8.read(fd)
while (byte & 0x80) != 0 and byte_count < 3:
rval <<= 7
rval |= byte & 0x7f
byte = U8.read(fd)
byte_count += 1
if byte_count < 3:
rval <<= 7
rval |= byte & 0x7F
else:
rval <<= 8
rval |= byte & 0xff
if (rval & 0x10000000) != 0:
rval -= 0x20000000
return rval
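# Worked examples (illustrative, not part of the original module) of the U29
# variable-length encoding implemented above: seven payload bits per byte
# with a continuation flag, except the fourth byte, which carries eight bits.
def _u29_examples():
    assert AMF3Integer.pack(0x7f) == b"\x7f"                  # 1 byte
    assert AMF3Integer.pack(0x80) == b"\x81\x00"              # 2 bytes
    assert AMF3Integer.pack(0x4000) == b"\x81\x80\x00"        # 3 bytes
    assert AMF3Integer.pack(0x200000) == b"\x80\xc0\x80\x00"  # 4 bytes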
class AMF3String(String):
@classmethod
def size(cls, val, cache):
data = String.pack(val, "utf8", "ignore")
size = len(data)
if size == 0:
return U8.size
elif val in cache:
index = cache.index(val)
return AMF3Integer.size(index << 1)
else:
cache.append(val)
return AMF3Integer.size(size << 1 | 1) + size
@classmethod
def pack(cls, val, cache):
data = String.pack(val, "utf8", "ignore")
size = len(data)
if size == 0:
return U8(AMF3_EMPTY_STRING)
elif val in cache:
index = cache.index(val)
return AMF3Integer(index << 1)
else:
cache.append(val)
chunks = []
chunks.append(AMF3Integer(size << 1 | 1))
chunks.append(data)
return b"".join(chunks)
@classmethod
def read(cls, fd, cache):
header = AMF3Integer.read(fd)
if (header & 1) == 0:
index = header >> 1
return cache[index]
else:
size = header >> 1
data = fd.read(size)
rval = data.decode("utf8", "ignore")
if len(data) > 0:
cache.append(rval)
return rval
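# Note (added for clarity): the cache list above implements the AMF3 string
# reference table - a header with the low bit clear is an index into
# previously seen strings rather than an inline string.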
class AMF3ObjectBase(object):
__dynamic__ = False
__externalizable__ = False
__members__ = []
_registry = {}
def __init__(self, *args, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
        return "<{0} {1!r}>".format(self.__class__.__name__, self.__dict__)
@classmethod
def register(cls, name):
def deco(amfcls):
amfcls.__name__ = name
if not amfcls.__members__:
amfcls.__members__ = getargspec(amfcls.__init__).args[1:]
cls._registry[name] = amfcls
return amfcls
return deco
@classmethod
def lookup(cls, name):
return cls._registry.get(name, None)
@classmethod
def create(cls, name, externalizable, dynamic, members):
if is_py2:
name = name.encode("utf8")
        amfcls = type(name, (cls,), {})
        amfcls.__externalizable__ = externalizable
        amfcls.__dynamic__ = dynamic
        amfcls.__members__ = members
        return amfcls
class AMF3Object(OrderedDict, AMF3ObjectBase):
__dynamic__ = True
class AMF3ObjectPacker(DynamicType, AMF3Type):
__identifier__ = AMF3_TYPE_OBJECT
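    # U29 object header layout used by pack/read below: bit 0 = value is
    # inline (vs. an object-table reference), bit 1 = traits are inline
    # (vs. a traits-table reference), bit 2 = externalizable, bit 3 =
    # dynamic, and the remaining high bits hold the sealed member count.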
@classmethod
def size(cls, val, str_cache, object_cache, traits_cache):
if val in object_cache:
index = object_cache.index(val)
return AMF3Integer.size(index << 1)
else:
object_cache.append(val)
size = 0
traits = type(val)
if traits in traits_cache:
index = traits_cache.index(traits)
size += AMF3Integer.size(index << 2 | 0x01)
else:
header = 0x03
if traits.__dynamic__:
header |= 0x02 << 2
if traits.__externalizable__:
header |= 0x01 << 2
header |= (len(traits.__members__)) << 4
size += AMF3Integer.size(header)
if isinstance(val, AMF3Object):
size += U8.size
else:
size += AMF3String.size(traits.__name__, cache=str_cache)
traits_cache.append(traits)
for member in traits.__members__:
size += AMF3String.size(member, cache=str_cache)
for member in traits.__members__:
value = getattr(val, member)
size += AMF3Value.size(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
if traits.__dynamic__:
if isinstance(val, AMF3Object):
iterator = val.items()
else:
iterator = val.__dict__.items()
for key, value in iterator:
if key in traits.__members__:
continue
size += AMF3String.size(key, cache=str_cache)
size += AMF3Value.size(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
size += U8.size
return size
@classmethod
def pack(cls, val, str_cache, object_cache, traits_cache):
chunks = []
if val in object_cache:
index = object_cache.index(val)
return AMF3Integer(index << 1)
else:
object_cache.append(val)
chunks = []
traits = type(val)
if traits in traits_cache:
index = traits_cache.index(traits)
chunks.append(AMF3Integer(index << 2 | 0x01))
else:
header = 0x03
if traits.__dynamic__:
header |= 0x02 << 2
if traits.__externalizable__:
header |= 0x01 << 2
header |= (len(traits.__members__)) << 4
chunks.append(AMF3Integer(header))
if isinstance(val, AMF3Object):
chunks.append(U8(AMF3_EMPTY_STRING))
else:
chunks.append(AMF3String(traits.__name__, cache=str_cache))
traits_cache.append(traits)
for member in traits.__members__:
chunks.append(AMF3String(member, cache=str_cache))
for member in traits.__members__:
value = getattr(val, member)
value = AMF3Value.pack(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
chunks.append(value)
if traits.__dynamic__:
if isinstance(val, AMF3Object):
iterator = val.items()
else:
iterator = val.__dict__.items()
for key, value in iterator:
if key in traits.__members__:
continue
key = AMF3String(key, cache=str_cache)
value = AMF3Value.pack(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
chunks.append(key)
chunks.append(value)
# Empty string is end of dynamic values
chunks.append(U8(AMF3_CLOSE_DYNAMIC_ARRAY))
return b"".join(chunks)
@classmethod
def read(cls, fd, str_cache, object_cache, traits_cache):
header = AMF3Integer.read(fd)
obj = None
if (header & 1) == 0:
index = header >> 1
obj = object_cache[index]
else:
header >>= 1
if (header & 1) == 0:
index = header >> 1
traits = traits_cache[index]
else:
externalizable = (header & 2) != 0
dynamic = (header & 4) != 0
members_len = header >> 3
class_name = AMF3String.read(fd, cache=str_cache)
members = []
for i in range(members_len):
member_name = AMF3String.read(fd, cache=str_cache)
members.append(member_name)
if len(class_name) == 0:
traits = AMF3Object
elif AMF3ObjectBase.lookup(class_name):
traits = AMF3ObjectBase.lookup(class_name)
traits.__members__ = members
traits.__dynamic__ = dynamic
traits_cache.append(traits)
else:
traits = AMF3ObjectBase.create(class_name, externalizable,
dynamic, members)
traits_cache.append(traits)
values = OrderedDict()
for member in traits.__members__:
value = AMF3Value.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
values[member] = value
if traits.__dynamic__:
key = AMF3String.read(fd, cache=str_cache)
while len(key) > 0:
value = AMF3Value.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
values[key] = value
key = AMF3String.read(fd, cache=str_cache)
if traits == AMF3Object:
obj = traits(values)
else:
obj = traits(**values)
return obj
class AMF3Array(OrderedDict):
def __init__(self, *args, **kwargs):
if args and isinstance(args[0], list):
OrderedDict.__init__(self, **kwargs)
for i, value in enumerate(args[0]):
self[i] = value
else:
OrderedDict.__init__(self, *args, **kwargs)
def dense_keys(self):
dense_keys = []
for i in range(len(self)):
if i in self:
dense_keys.append(i)
return dense_keys
def dense_values(self):
for key in self.dense_keys():
yield self[key]
class AMF3ArrayPacker(DynamicType, AMF3Type):
__identifier__ = AMF3_TYPE_ARRAY
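    # AMF3 arrays carry an associative part (string keys terminated by an
    # empty key) followed by a dense part whose length is encoded in the
    # U29 header; pack/read below handle both parts.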
@classmethod
def size(cls, val, str_cache, object_cache, traits_cache):
if val in object_cache:
index = object_cache.index(val)
return AMF3Integer.size(index << 1)
else:
object_cache.append(val)
size = 0
if isinstance(val, AMF3Array):
dense_keys = val.dense_keys()
length = len(dense_keys)
else:
length = len(val)
dense_keys = list(range(length))
header = length << 1 | 1
size += AMF3Integer.size(header)
if isinstance(val, AMF3Array):
for key, value in val.items():
if key in dense_keys:
continue
size += AMF3String.size(key, cache=str_cache)
size += AMF3Value.size(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
size += U8.size
for key in dense_keys:
value = val[key]
size += AMF3Value.size(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
return size
@classmethod
def pack(cls, val, str_cache, object_cache, traits_cache):
if val in object_cache:
index = object_cache.index(val)
return AMF3Integer(index << 1)
else:
object_cache.append(val)
chunks = []
if isinstance(val, AMF3Array):
dense_keys = val.dense_keys()
length = len(dense_keys)
else:
length = len(val)
dense_keys = list(range(length))
header = length << 1 | 1
chunks.append(AMF3Integer(header))
if isinstance(val, AMF3Array):
for key, value in val.items():
if key in dense_keys:
continue
chunks.append(AMF3String(key, cache=str_cache))
value = AMF3Value.pack(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
chunks.append(value)
# Empty string is end of dynamic values
chunks.append(U8(AMF3_CLOSE_DYNAMIC_ARRAY))
for key in dense_keys:
value = val[key]
value = AMF3Value.pack(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
chunks.append(value)
return b"".join(chunks)
@classmethod
def read(cls, fd, str_cache, object_cache, traits_cache):
header = AMF3Integer.read(fd)
obj = None
if (header & 1) == 0:
index = header >> 1
obj = object_cache[index]
else:
header >>= 1
obj = AMF3Array()
object_cache.append(obj)
key = AMF3String.read(fd, cache=str_cache)
while len(key) > 0:
value = AMF3Value.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
obj[key] = value
key = AMF3String.read(fd, cache=str_cache)
for i in range(header):
value = AMF3Value.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
obj[i] = value
return obj
class AMF3Date(object):
def __init__(self, time):
self.time = time
class AMF3DatePacker(DynamicType, AMF3Type):
    __identifier__ = AMF3_TYPE_DATE
@classmethod
def size(cls, val, cache):
if val in cache:
index = cache.index(val)
return AMF3Integer.size(index << 1)
else:
cache.append(val)
return AMF3Double.size + U8.size
@classmethod
def pack(cls, val, cache):
if val in cache:
index = cache.index(val)
return AMF3Integer(index << 1)
else:
cache.append(val)
            # U8(AMF3_TYPE_NULL) writes the 0x01 byte, which here doubles as
            # the U29 date header with the inline flag set; read() parses it
            # back as such before the 8-byte timestamp.
            chunks = [U8(AMF3_TYPE_NULL),
                      AMF3Double(val.time)]
return b"".join(chunks)
@classmethod
def read(cls, fd, cache):
header = AMF3Integer.read(fd)
if (header & 1) == 0:
index = header >> 1
return cache[index]
else:
time = AMF3Double.read(fd)
date = AMF3Date(time)
cache.append(date)
return date
class AMF3Value(DynamicType):
PrimitiveReaders = {
AMF3_TYPE_DOUBLE: AMF3Double,
}
DynamicReaders = {
AMF3_TYPE_INTEGER: AMF3Integer,
}
Readers = PrimitiveReaders.copy()
Readers.update(DynamicReaders)
@classmethod
def size(cls, val, str_cache=None, object_cache=None, traits_cache=None):
if str_cache is None:
str_cache = []
if object_cache is None:
object_cache = []
if traits_cache is None:
traits_cache = []
size = U8.size
        if isinstance(val, bool):
pass
elif val is None:
pass
elif isinstance(val, integer_types):
if val < AMF3_MIN_INTEGER or val > AMF3_MAX_INTEGER:
size += AMF3Double.size
else:
size += AMF3Integer.size(val)
elif isinstance(val, float):
size += AMF3Double.size
elif isinstance(val, (AMF3Array, list)):
size += AMF3ArrayPacker.size(val, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
elif isinstance(val, string_types):
size += AMF3String.size(val, cache=str_cache)
elif isinstance(val, AMF3ObjectBase):
size += AMF3ObjectPacker.size(val, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
elif isinstance(val, AMF3Date):
size += AMF3DatePacker.size(val, cache=object_cache)
else:
raise ValueError("Unable to pack value of type {0}".format(type(val)))
return size
@classmethod
def pack(cls, val, str_cache=None, object_cache=None, traits_cache=None):
if str_cache is None:
str_cache = []
if object_cache is None:
object_cache = []
if traits_cache is None:
traits_cache = []
chunks = []
if isinstance(val, bool):
if val is False:
chunks.append(U8(AMF3_TYPE_FALSE))
elif val is True:
chunks.append(U8(AMF3_TYPE_TRUE))
elif val is None:
chunks.append(U8(AMF3_TYPE_NULL))
elif isinstance(val, integer_types):
if val < AMF3_MIN_INTEGER or val > AMF3_MAX_INTEGER:
chunks.append(U8(AMF3_TYPE_DOUBLE))
chunks.append(AMF3Double(val))
else:
chunks.append(U8(AMF3_TYPE_INTEGER))
chunks.append(AMF3Integer(val))
elif isinstance(val, float):
chunks.append(U8(AMF3_TYPE_DOUBLE))
chunks.append(AMF3Double(val))
elif isinstance(val, (AMF3Array, list)):
chunks.append(U8(AMF3_TYPE_ARRAY))
chunks.append(AMF3ArrayPacker.pack(val, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache))
elif isinstance(val, string_types):
chunks.append(U8(AMF3_TYPE_STRING))
chunks.append(AMF3String.pack(val, cache=str_cache))
elif isinstance(val, AMF3ObjectBase):
chunks.append(U8(AMF3_TYPE_OBJECT))
chunks.append(AMF3ObjectPacker.pack(val, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache))
elif isinstance(val, AMF3Date):
chunks.append(U8(AMF3_TYPE_DATE))
chunks.append(AMF3DatePacker.pack(val, cache=object_cache))
else:
raise ValueError("Unable to pack value of type {0}".format(type(val)))
return b"".join(chunks)
@classmethod
def read(cls, fd, str_cache=None, object_cache=None, traits_cache=None):
type_ = U8.read(fd)
if str_cache is None:
str_cache = []
if object_cache is None:
object_cache = []
if traits_cache is None:
traits_cache = []
if type_ == AMF3_TYPE_UNDEFINED or type_ == AMF3_TYPE_NULL:
return None
elif type_ == AMF3_TYPE_FALSE:
return False
elif type_ == AMF3_TYPE_TRUE:
return True
elif type_ == AMF3_TYPE_STRING:
return AMF3String.read(fd, cache=str_cache)
elif type_ == AMF3_TYPE_ARRAY:
return AMF3ArrayPacker.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
elif type_ == AMF3_TYPE_OBJECT:
return AMF3ObjectPacker.read(fd, str_cache=str_cache, object_cache=object_cache,
traits_cache=traits_cache)
elif type_ == AMF3_TYPE_DATE:
return AMF3DatePacker.read(fd, cache=object_cache)
elif type_ in cls.Readers:
return cls.Readers[type_].read(fd)
else:
raise IOError("Unhandled AMF3 type: {0}".format(hex(type_)))
| gpl-2.0 | -2,049,349,614,431,492,400 | 27.554863 | 92 | 0.531614 | false |
halberom/ansible | lib/ansible/plugins/shell/fish.py | 45 | 4770 | # (c) 2014, Chris Church <[email protected]>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.shell.sh import ShellModule as ShModule
from ansible.compat.six import text_type
from ansible.compat.six.moves import shlex_quote
class ShellModule(ShModule):
# Common shell filenames that this plugin handles
COMPATIBLE_SHELLS = frozenset(('fish',))
# Family of shells this has. Must match the filename without extension
SHELL_FAMILY = 'fish'
_SHELL_EMBEDDED_PY_EOL = '\n'
_SHELL_REDIRECT_ALLNULL = '> /dev/null 2>&1'
_SHELL_AND = '; and'
_SHELL_OR = '; or'
_SHELL_SUB_LEFT = '('
_SHELL_SUB_RIGHT = ')'
_SHELL_GROUP_LEFT = ''
_SHELL_GROUP_RIGHT = ''
def env_prefix(self, **kwargs):
env = self.env.copy()
env.update(kwargs)
        return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k, v in env.items()])
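    # For example (illustrative): env_prefix(FOO='bar baz') yields
    # "set -lx FOO 'bar baz';" - fish has no VAR=value command prefix, so
    # variables are exported with explicit 'set -lx' statements instead.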
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
# don't quote the cmd if it's an empty string, because this will break pipelining mode
if cmd.strip() != '':
cmd = shlex_quote(cmd)
cmd_parts = [env_string.strip(), shebang.replace("#!", "").strip(), cmd]
if arg_path is not None:
cmd_parts.append(arg_path)
new_cmd = " ".join(cmd_parts)
if rm_tmp:
new_cmd = 'begin ; %s; rm -rf "%s" %s ; end' % (new_cmd, rm_tmp, self._SHELL_REDIRECT_ALLNULL)
return new_cmd
def checksum(self, path, python_interp):
# The following test is fish-compliant.
#
# In the following test, each condition is a check and logical
# comparison (or or and) that sets the rc value. Every check is run so
# the last check in the series to fail will be the rc that is
# returned.
#
# If a check fails we error before invoking the hash functions because
# hash functions may successfully take the hash of a directory on BSDs
# (UFS filesystem?) which is not what the rest of the ansible code
# expects
#
# If all of the available hashing methods fail we fail with an rc of
# 0. This logic is added to the end of the cmd at the bottom of this
# function.
# Return codes:
# checksum: success!
# 0: Unknown error
# 1: Remote file does not exist
# 2: No read permissions on the file
# 3: File is a directory
# 4: No python interpreter
# Quoting gets complex here. We're writing a python string that's
# used by a variety of shells on the remote host to invoke a python
# "one-liner".
shell_escaped_path = shlex_quote(path)
test = "set rc flag; [ -r %(p)s ] %(shell_or)s set rc 2; [ -f %(p)s ] %(shell_or)s set rc 1; [ -d %(p)s ] %(shell_and)s set rc 3; %(i)s -V 2>/dev/null %(shell_or)s set rc 4; [ x\"$rc\" != \"xflag\" ] %(shell_and)s echo \"$rc \"%(p)s %(shell_and)s exit 0" % dict(p=shell_escaped_path, i=python_interp, shell_and=self._SHELL_AND, shell_or=self._SHELL_OR)
csums = [
u"({0} -c 'import hashlib; BLOCKSIZE = 65536; hasher = hashlib.sha1();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL), # Python > 2.4 (including python3)
u"({0} -c 'import sha; BLOCKSIZE = 65536; hasher = sha.sha();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL), # Python == 2.4
]
cmd = (" %s " % self._SHELL_OR).join(csums)
cmd = "%s; %s %s (echo \'0 \'%s)" % (test, cmd, self._SHELL_OR, shell_escaped_path)
return cmd
| gpl-3.0 | 4,027,931,912,160,198,000 | 49.210526 | 394 | 0.626205 | false |
MERegistro/meregistro | meregistro/apps/titulos/models/CohorteExtensionAulicaSeguimiento.py | 1 | 1280 | # -*- coding: utf-8 -*-
from django.db import models
from apps.titulos.models.CohorteExtensionAulica import CohorteExtensionAulica
import datetime
"Tracking record for each cohort of the extension aulica (classroom annex)"
class CohorteExtensionAulicaSeguimiento(models.Model):
cohorte_extension_aulica = models.ForeignKey(CohorteExtensionAulica, related_name='seguimiento')
anio = models.PositiveIntegerField()
solo_cursan_nuevas_unidades = models.PositiveIntegerField()
solo_recursan_nuevas_unidades = models.PositiveIntegerField()
recursan_cursan_nuevas_unidades = models.PositiveIntegerField()
no_cursan = models.PositiveIntegerField()
egresados = models.PositiveIntegerField()
observaciones = models.CharField(max_length=255, null=True, blank=True)
class Meta:
app_label = 'titulos'
ordering = ['cohorte_extension_aulica__cohorte__anio', 'anio']
db_table = 'titulos_cohorte_extension_aulica_seguimiento'
        unique_together = ('cohorte_extension_aulica', 'anio')  # -> does not work here, so it is validated by hand
def __unicode__(self):
        return str(self.anio)
    "Override __init__ to attach extra properties"
def __init__(self, *args, **kwargs):
super(CohorteExtensionAulicaSeguimiento, self).__init__(*args, **kwargs)
| bsd-3-clause | -3,423,057,151,977,916,400 | 44.642857 | 100 | 0.729264 | false |
Manexware/medical | oemedical/oemedical_invoice/wizard/wizard_appointment_invoice.py | 1 | 5003 |
import logging
from openerp.osv import osv, fields
from openerp import _
#import pooler
logging.basicConfig(level=logging.DEBUG)
class make_medical_appointment_invoice(osv.osv_memory):
    _name = "oemedical.appointment.invoice"
    def create_invoice(self, cr, uid, ids, context=None):
        if context is None:
            context = {}
        invoice_obj = self.pool.get('account.invoice')
        appointment_obj = self.pool.get('oemedical.appointment')
        apps = context.get('active_ids')
pats = []
for app_id in apps:
pats.append(appointment_obj.browse(cr, uid, app_id).patient_id.id)
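        # All selected appointments must belong to a single patient before
        # they can be aggregated into one invoice (checked below).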
if pats.count(pats[0]) == len(pats):
invoice_data={}
for app_id in apps:
appointment = appointment_obj.browse( cr, uid, app_id)
# Check if the appointment is invoice exempt, and stop the invoicing process
if appointment.no_invoice :
raise osv.except_osv(_('UserError'), _('The appointment is invoice exempt'))
if appointment.validity_status=='invoiced':
if len(apps) > 1:
raise osv.except_osv(_('UserError'),_('At least one of the selected appointments is already invoiced'))
else:
raise osv.except_osv(_('UserError'),_('Appointment already invoiced'))
if appointment.validity_status=='no':
if len(apps) > 1:
raise osv.except_osv(_('UserError'),_('At least one of the selected appointments can not be invoiced'))
else:
raise osv.except_osv(_('UserError'),_('You can not invoice this appointment'))
if appointment.patient_id.id:
invoice_data['partner_id'] = appointment.patient_id.id
res = self.pool.get('res.partner').address_get(cr, uid, [appointment.patient_id.id], ['contact', 'invoice'])
invoice_data['address_contact_id'] = res['contact']
invoice_data['address_invoice_id'] = res['invoice']
invoice_data['account_id'] = appointment.patient_id.property_account_receivable.id
invoice_data['fiscal_position'] = appointment.patient_id.property_account_position and appointment.patient_id.property_account_position.id or False
invoice_data['payment_term'] = appointment.patient_id.property_payment_term and appointment.patient_id.property_payment_term.id or False
prods_data = {}
for app_id in apps:
appointment = appointment_obj.browse( cr, uid, app_id)
logging.debug('appointment = %s; appointment.consultations = %s', appointment, appointment.consultations)
if appointment.consultations:
logging.debug('appointment.consultations = %s; appointment.consultations.id = %s', appointment.consultations, appointment.consultations.id)
                if appointment.consultations.id in prods_data:
prods_data[appointment.consultations.id]['quantity'] += 1
else:
a = appointment.consultations.product_tmpl_id.property_account_income.id
if not a:
a = appointment.consultations.categ_id.property_account_income_categ.id
prods_data[appointment.consultations.id] = {'product_id':appointment.consultations.id,
'name':appointment.consultations.name,
'quantity':1,
'account_id':a,
'price_unit':appointment.consultations.lst_price}
else:
raise osv.except_osv(_('UserError'),_('No consultation service is connected with the selected appointments'))
product_lines = []
for prod_id, prod_data in prods_data.items():
product_lines.append((0,0,{'product_id':prod_data['product_id'],
'name':prod_data['name'],
'quantity':prod_data['quantity'],
'account_id':prod_data['account_id'],
'price_unit':prod_data['price_unit']}))
invoice_data['invoice_line'] = product_lines
invoice_id = invoice_obj.create(cr, uid, invoice_data)
appointment_obj.write(cr, uid, apps, {'validity_status':'invoiced'})
return {
'domain': "[('id','=', "+str(invoice_id)+")]",
'name': 'Create invoice',
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'account.invoice',
'type': 'ir.actions.act_window'
}
else:
raise osv.except_osv(_('UserError'),_('When multiple appointments are selected, patient must be the same'))
make_medical_appointment_invoice()
| gpl-2.0 | 3,683,135,472,002,146,300 | 49.03 | 163 | 0.556666 | false |
jadbin/xpaw | tests/test_commands.py | 1 | 1655 | # coding=utf-8
import pytest
from os.path import join
from xpaw.cmdline import main
from xpaw import __version__
def test_print_help(capsys):
with pytest.raises(SystemExit) as excinfo:
main(argv=['xpaw'])
assert excinfo.value.code == 0
out, _ = capsys.readouterr()
assert out.startswith('usage:')
def test_unknown_command(capsys):
with pytest.raises(SystemExit) as excinfo:
main(argv=['xpaw', 'unknown_command'])
assert excinfo.value.code == 2
_, _ = capsys.readouterr()
def test_version(capsys):
main(argv=['xpaw', 'version'])
out, _ = capsys.readouterr()
assert out.strip() == 'xpaw version {}'.format(__version__)
spider_source = """# coding=utf-8
from xpaw import Spider
class NewSpider(Spider):
def start_requests(self):
pass
def parse(self, response):
pass
"""
def test_crawl_spider(tmpdir, capsys):
spider_file = join(str(tmpdir), 'spider.py')
with open(spider_file, 'w') as f:
f.write(spider_source)
main(argv=['xpaw', 'crawl', spider_file])
_, _ = capsys.readouterr()
def test_crawl_spider_no_config_file(tmpdir, capsys):
with pytest.raises(ValueError):
spider_file = join(str(tmpdir), 'spider.py')
with open(spider_file, 'w') as f:
f.write(spider_source)
main(argv=['xpaw', 'crawl', spider_file,
'-c', 'no_such_config.py'])
_, _ = capsys.readouterr()
def test_crawl_no_spider_file(capsys):
with pytest.raises(SystemExit) as excinfo:
main(argv=['xpaw', 'crawl', 'dont_exist.py'])
assert excinfo.value.code == 2
_, _ = capsys.readouterr()
| apache-2.0 | 1,304,430,475,888,588,000 | 23.338235 | 63 | 0.621752 | false |
pschella/scipy | scipy/special/tests/test_spherical_bessel.py | 44 | 13962 | #
# Tests of spherical Bessel functions.
#
import numpy as np
from numpy.testing import (assert_almost_equal, assert_allclose, dec,
assert_array_almost_equal)
from numpy import sin, cos, sinh, cosh, exp, inf, nan, r_, pi
from scipy.special import spherical_jn, spherical_yn, spherical_in, spherical_kn
from scipy.integrate import quad
class TestSphericalJn:
def test_spherical_jn_exact(self):
# http://dlmf.nist.gov/10.49.E3
# Note: exact expression is numerically stable only for small
# n or z >> n.
x = np.array([0.12, 1.23, 12.34, 123.45, 1234.5])
assert_allclose(spherical_jn(2, x),
(-1/x + 3/x**3)*sin(x) - 3/x**2*cos(x))
def test_spherical_jn_recurrence_complex(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 1.1 + 1.5j
assert_allclose(spherical_jn(n - 1, x) + spherical_jn(n + 1, x),
(2*n + 1)/x*spherical_jn(n, x))
def test_spherical_jn_recurrence_real(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 0.12
assert_allclose(spherical_jn(n - 1, x) + spherical_jn(n + 1,x),
(2*n + 1)/x*spherical_jn(n, x))
def test_spherical_jn_inf_real(self):
# http://dlmf.nist.gov/10.52.E3
n = 6
x = np.array([-inf, inf])
assert_allclose(spherical_jn(n, x), np.array([0, 0]))
def test_spherical_jn_inf_complex(self):
# http://dlmf.nist.gov/10.52.E3
n = 7
x = np.array([-inf + 0j, inf + 0j, inf*(1+1j)])
assert_allclose(spherical_jn(n, x), np.array([0, 0, inf*(1+1j)]))
def test_spherical_jn_large_arg_1(self):
# https://github.com/scipy/scipy/issues/2165
# Reference value computed using mpmath, via
# besselj(n + mpf(1)/2, z)*sqrt(pi/(2*z))
assert_allclose(spherical_jn(2, 3350.507), -0.00029846226538040747)
def test_spherical_jn_large_arg_2(self):
# https://github.com/scipy/scipy/issues/1641
# Reference value computed using mpmath, via
# besselj(n + mpf(1)/2, z)*sqrt(pi/(2*z))
assert_allclose(spherical_jn(2, 10000), 3.0590002633029811e-05)
def test_spherical_jn_at_zero(self):
# http://dlmf.nist.gov/10.52.E1
# But note that n = 0 is a special case: j0 = sin(x)/x -> 1
n = np.array([0, 1, 2, 5, 10, 100])
x = 0
assert_allclose(spherical_jn(n, x), np.array([1, 0, 0, 0, 0, 0]))
class TestSphericalYn:
def test_spherical_yn_exact(self):
# http://dlmf.nist.gov/10.49.E5
# Note: exact expression is numerically stable only for small
# n or z >> n.
x = np.array([0.12, 1.23, 12.34, 123.45, 1234.5])
assert_allclose(spherical_yn(2, x),
(1/x - 3/x**3)*cos(x) - 3/x**2*sin(x))
def test_spherical_yn_recurrence_real(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 0.12
assert_allclose(spherical_yn(n - 1, x) + spherical_yn(n + 1,x),
(2*n + 1)/x*spherical_yn(n, x))
def test_spherical_yn_recurrence_complex(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 1.1 + 1.5j
assert_allclose(spherical_yn(n - 1, x) + spherical_yn(n + 1, x),
(2*n + 1)/x*spherical_yn(n, x))
def test_spherical_yn_inf_real(self):
# http://dlmf.nist.gov/10.52.E3
n = 6
x = np.array([-inf, inf])
assert_allclose(spherical_yn(n, x), np.array([0, 0]))
def test_spherical_yn_inf_complex(self):
# http://dlmf.nist.gov/10.52.E3
n = 7
x = np.array([-inf + 0j, inf + 0j, inf*(1+1j)])
assert_allclose(spherical_yn(n, x), np.array([0, 0, inf*(1+1j)]))
def test_spherical_yn_at_zero(self):
# http://dlmf.nist.gov/10.52.E2
n = np.array([0, 1, 2, 5, 10, 100])
x = 0
assert_allclose(spherical_yn(n, x), -inf*np.ones(shape=n.shape))
def test_spherical_yn_at_zero_complex(self):
# Consistently with numpy:
# >>> -np.cos(0)/0
# -inf
# >>> -np.cos(0+0j)/(0+0j)
# (-inf + nan*j)
n = np.array([0, 1, 2, 5, 10, 100])
x = 0 + 0j
assert_allclose(spherical_yn(n, x), nan*np.ones(shape=n.shape))
class TestSphericalJnYnCrossProduct:
def test_spherical_jn_yn_cross_product_1(self):
# http://dlmf.nist.gov/10.50.E3
n = np.array([1, 5, 8])
x = np.array([0.1, 1, 10])
left = (spherical_jn(n + 1, x) * spherical_yn(n, x) -
spherical_jn(n, x) * spherical_yn(n + 1, x))
right = 1/x**2
assert_allclose(left, right)
def test_spherical_jn_yn_cross_product_2(self):
# http://dlmf.nist.gov/10.50.E3
n = np.array([1, 5, 8])
x = np.array([0.1, 1, 10])
left = (spherical_jn(n + 2, x) * spherical_yn(n, x) -
spherical_jn(n, x) * spherical_yn(n + 2, x))
right = (2*n + 3)/x**3
assert_allclose(left, right)
class TestSphericalIn:
def test_spherical_in_exact(self):
# http://dlmf.nist.gov/10.49.E9
x = np.array([0.12, 1.23, 12.34, 123.45])
assert_allclose(spherical_in(2, x),
(1/x + 3/x**3)*sinh(x) - 3/x**2*cosh(x))
def test_spherical_in_recurrence_real(self):
# http://dlmf.nist.gov/10.51.E4
n = np.array([1, 2, 3, 7, 12])
x = 0.12
assert_allclose(spherical_in(n - 1, x) - spherical_in(n + 1,x),
(2*n + 1)/x*spherical_in(n, x))
def test_spherical_in_recurrence_complex(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 1.1 + 1.5j
assert_allclose(spherical_in(n - 1, x) - spherical_in(n + 1,x),
(2*n + 1)/x*spherical_in(n, x))
def test_spherical_in_inf_real(self):
# http://dlmf.nist.gov/10.52.E3
n = 5
x = np.array([-inf, inf])
assert_allclose(spherical_in(n, x), np.array([-inf, inf]))
def test_spherical_in_inf_complex(self):
# http://dlmf.nist.gov/10.52.E5
# Ideally, i1n(n, 1j*inf) = 0 and i1n(n, (1+1j)*inf) = (1+1j)*inf, but
# this appears impossible to achieve because C99 regards any complex
# value with at least one infinite part as a complex infinity, so
# 1j*inf cannot be distinguished from (1+1j)*inf. Therefore, nan is
# the correct return value.
n = 7
x = np.array([-inf + 0j, inf + 0j, inf*(1+1j)])
assert_allclose(spherical_in(n, x), np.array([-inf, inf, nan]))
def test_spherical_in_at_zero(self):
# http://dlmf.nist.gov/10.52.E1
# But note that n = 0 is a special case: i0 = sinh(x)/x -> 1
n = np.array([0, 1, 2, 5, 10, 100])
x = 0
assert_allclose(spherical_in(n, x), np.array([1, 0, 0, 0, 0, 0]))
class TestSphericalKn:
def test_spherical_kn_exact(self):
# http://dlmf.nist.gov/10.49.E13
x = np.array([0.12, 1.23, 12.34, 123.45])
assert_allclose(spherical_kn(2, x),
pi/2*exp(-x)*(1/x + 3/x**2 + 3/x**3))
def test_spherical_kn_recurrence_real(self):
# http://dlmf.nist.gov/10.51.E4
n = np.array([1, 2, 3, 7, 12])
x = 0.12
assert_allclose((-1)**(n - 1)*spherical_kn(n - 1, x) - (-1)**(n + 1)*spherical_kn(n + 1,x),
(-1)**n*(2*n + 1)/x*spherical_kn(n, x))
def test_spherical_kn_recurrence_complex(self):
# http://dlmf.nist.gov/10.51.E4
n = np.array([1, 2, 3, 7, 12])
x = 1.1 + 1.5j
assert_allclose((-1)**(n - 1)*spherical_kn(n - 1, x) - (-1)**(n + 1)*spherical_kn(n + 1,x),
(-1)**n*(2*n + 1)/x*spherical_kn(n, x))
def test_spherical_kn_inf_real(self):
# http://dlmf.nist.gov/10.52.E6
n = 5
x = np.array([-inf, inf])
assert_allclose(spherical_kn(n, x), np.array([-inf, 0]))
def test_spherical_kn_inf_complex(self):
# http://dlmf.nist.gov/10.52.E6
# The behavior at complex infinity depends on the sign of the real
# part: if Re(z) >= 0, then the limit is 0; if Re(z) < 0, then it's
# z*inf. This distinction cannot be captured, so we return nan.
n = 7
x = np.array([-inf + 0j, inf + 0j, inf*(1+1j)])
assert_allclose(spherical_kn(n, x), np.array([-inf, 0, nan]))
def test_spherical_kn_at_zero(self):
# http://dlmf.nist.gov/10.52.E2
n = np.array([0, 1, 2, 5, 10, 100])
x = 0
assert_allclose(spherical_kn(n, x), inf*np.ones(shape=n.shape))
def test_spherical_kn_at_zero_complex(self):
# http://dlmf.nist.gov/10.52.E2
n = np.array([0, 1, 2, 5, 10, 100])
x = 0 + 0j
assert_allclose(spherical_kn(n, x), nan*np.ones(shape=n.shape))
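# The derivative tests below exercise the fundamental theorem of calculus:
# each subclass supplies f and df, and integrating df over [a, b] with quad
# must reproduce f(b) - f(a) within the integrator's reported tolerance.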
class SphericalDerivativesTestCase:
def fundamental_theorem(self, n, a, b):
integral, tolerance = quad(lambda z: self.df(n, z), a, b)
assert_allclose(integral,
self.f(n, b) - self.f(n, a),
atol=tolerance)
@dec.slow
def test_fundamental_theorem_0(self):
self.fundamental_theorem(0, 3.0, 15.0)
@dec.slow
def test_fundamental_theorem_7(self):
self.fundamental_theorem(7, 0.5, 1.2)
class TestSphericalJnDerivatives(SphericalDerivativesTestCase):
def f(self, n, z):
return spherical_jn(n, z)
def df(self, n, z):
return spherical_jn(n, z, derivative=True)
def test_spherical_jn_d_zero(self):
n = np.array([1, 2, 3, 7, 15])
assert_allclose(spherical_jn(n, 0, derivative=True),
np.zeros(5))
class TestSphericalYnDerivatives(SphericalDerivativesTestCase):
def f(self, n, z):
return spherical_yn(n, z)
def df(self, n, z):
return spherical_yn(n, z, derivative=True)
class TestSphericalInDerivatives(SphericalDerivativesTestCase):
def f(self, n, z):
return spherical_in(n, z)
def df(self, n, z):
return spherical_in(n, z, derivative=True)
def test_spherical_in_d_zero(self):
n = np.array([1, 2, 3, 7, 15])
assert_allclose(spherical_in(n, 0, derivative=True),
np.zeros(5))
class TestSphericalKnDerivatives(SphericalDerivativesTestCase):
def f(self, n, z):
return spherical_kn(n, z)
def df(self, n, z):
return spherical_kn(n, z, derivative=True)
class TestSphericalOld:
# These are tests from the TestSpherical class of test_basic.py,
# rewritten to use spherical_* instead of sph_* but otherwise unchanged.
def test_sph_in(self):
# This test reproduces test_basic.TestSpherical.test_sph_in.
i1n = np.empty((2,2))
x = 0.2
i1n[0][0] = spherical_in(0, x)
i1n[0][1] = spherical_in(1, x)
i1n[1][0] = spherical_in(0, x, derivative=True)
i1n[1][1] = spherical_in(1, x, derivative=True)
inp0 = (i1n[0][1])
inp1 = (i1n[0][0] - 2.0/0.2 * i1n[0][1])
assert_array_almost_equal(i1n[0],np.array([1.0066800127054699381,
0.066933714568029540839]),12)
assert_array_almost_equal(i1n[1],[inp0,inp1],12)
def test_sph_in_kn_order0(self):
x = 1.
sph_i0 = np.empty((2,))
sph_i0[0] = spherical_in(0, x)
sph_i0[1] = spherical_in(0, x, derivative=True)
sph_i0_expected = np.array([np.sinh(x)/x,
np.cosh(x)/x-np.sinh(x)/x**2])
assert_array_almost_equal(r_[sph_i0], sph_i0_expected)
sph_k0 = np.empty((2,))
sph_k0[0] = spherical_kn(0, x)
sph_k0[1] = spherical_kn(0, x, derivative=True)
sph_k0_expected = np.array([0.5*pi*exp(-x)/x,
-0.5*pi*exp(-x)*(1/x+1/x**2)])
assert_array_almost_equal(r_[sph_k0], sph_k0_expected)
def test_sph_jn(self):
s1 = np.empty((2,3))
x = 0.2
s1[0][0] = spherical_jn(0, x)
s1[0][1] = spherical_jn(1, x)
s1[0][2] = spherical_jn(2, x)
s1[1][0] = spherical_jn(0, x, derivative=True)
s1[1][1] = spherical_jn(1, x, derivative=True)
s1[1][2] = spherical_jn(2, x, derivative=True)
s10 = -s1[0][1]
s11 = s1[0][0]-2.0/0.2*s1[0][1]
s12 = s1[0][1]-3.0/0.2*s1[0][2]
assert_array_almost_equal(s1[0],[0.99334665397530607731,
0.066400380670322230863,
0.0026590560795273856680],12)
assert_array_almost_equal(s1[1],[s10,s11,s12],12)
def test_sph_kn(self):
kn = np.empty((2,3))
x = 0.2
kn[0][0] = spherical_kn(0, x)
kn[0][1] = spherical_kn(1, x)
kn[0][2] = spherical_kn(2, x)
kn[1][0] = spherical_kn(0, x, derivative=True)
kn[1][1] = spherical_kn(1, x, derivative=True)
kn[1][2] = spherical_kn(2, x, derivative=True)
kn0 = -kn[0][1]
kn1 = -kn[0][0]-2.0/0.2*kn[0][1]
kn2 = -kn[0][1]-3.0/0.2*kn[0][2]
assert_array_almost_equal(kn[0],[6.4302962978445670140,
38.581777787067402086,
585.15696310385559829],12)
assert_array_almost_equal(kn[1],[kn0,kn1,kn2],9)
def test_sph_yn(self):
sy1 = spherical_yn(2, 0.2)
sy2 = spherical_yn(0, 0.2)
assert_almost_equal(sy1,-377.52483,5) # previous values in the system
assert_almost_equal(sy2,-4.9003329,5)
sphpy = (spherical_yn(0, 0.2) - 2*spherical_yn(2, 0.2))/3
sy3 = spherical_yn(1, 0.2, derivative=True)
        assert_almost_equal(sy3, sphpy, 4)  # compare with the correct derivative value (correct = -system value).
| bsd-3-clause | -6,494,177,927,398,915,000 | 36.232 | 99 | 0.537244 | false |
thinkopensolutions/geraldo | site/newsite/site-geraldo/django/core/serializers/python.py | 14 | 3883 | """
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from django.conf import settings
from django.core.serializers import base
from django.db import models
from django.utils.encoding import smart_unicode
class Serializer(base.Serializer):
"""
Serializes a QuerySet to basic Python objects.
"""
internal_use_only = True
def start_serialization(self):
self._current = None
self.objects = []
def end_serialization(self):
pass
def start_object(self, obj):
self._current = {}
def end_object(self, obj):
self.objects.append({
"model" : smart_unicode(obj._meta),
"pk" : smart_unicode(obj._get_pk_val(), strings_only=True),
"fields" : self._current
})
self._current = None
def handle_field(self, obj, field):
self._current[field.name] = smart_unicode(getattr(obj, field.name), strings_only=True)
def handle_fk_field(self, obj, field):
related = getattr(obj, field.name)
if related is not None:
if field.rel.field_name == related._meta.pk.name:
# Related to remote object via primary key
related = related._get_pk_val()
else:
# Related to remote object via other field
related = getattr(related, field.rel.field_name)
self._current[field.name] = smart_unicode(related, strings_only=True)
def handle_m2m_field(self, obj, field):
if field.creates_table:
self._current[field.name] = [smart_unicode(related._get_pk_val(), strings_only=True)
for related in getattr(obj, field.name).iterator()]
def getvalue(self):
return self.objects
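# Example of the structure produced and consumed here (illustrative): the
# serializer yields plain dicts such as
#   [{"model": "app.article", "pk": "1", "fields": {"title": "..."}}]
# and Deserializer below accepts the same shape back.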
def Deserializer(object_list, **options):
"""
Deserialize simple Python objects back into Django ORM instances.
It's expected that you pass the Python objects themselves (instead of a
stream or a string) to the constructor
"""
models.get_apps()
for d in object_list:
# Look up the model and starting build a dict of data for it.
Model = _get_model(d["model"])
        data = {Model._meta.pk.attname: Model._meta.pk.to_python(d["pk"])}
m2m_data = {}
# Handle each field
for (field_name, field_value) in d["fields"].iteritems():
if isinstance(field_value, str):
field_value = smart_unicode(field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True)
field = Model._meta.get_field(field_name)
# Handle M2M relations
if field.rel and isinstance(field.rel, models.ManyToManyRel):
m2m_convert = field.rel.to._meta.pk.to_python
m2m_data[field.name] = [m2m_convert(smart_unicode(pk)) for pk in field_value]
# Handle FK fields
elif field.rel and isinstance(field.rel, models.ManyToOneRel):
if field_value is not None:
data[field.attname] = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
else:
data[field.attname] = None
# Handle all other fields
else:
data[field.name] = field.to_python(field_value)
yield base.DeserializedObject(Model(**data), m2m_data)
def _get_model(model_identifier):
"""
Helper to look up a model from an "app_label.module_name" string.
"""
try:
Model = models.get_model(*model_identifier.split("."))
except TypeError:
Model = None
if Model is None:
raise base.DeserializationError(u"Invalid model identifier: '%s'" % model_identifier)
return Model
| lgpl-3.0 | -720,187,402,290,941,600 | 34.953704 | 126 | 0.608293 | false |
kutenai/django | tests/admin_views/models.py | 13 | 25314 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import os
import tempfile
import uuid
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Section(models.Model):
"""
A simple section that links to articles, to test linking to related items
in admin views.
"""
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@property
def name_property(self):
"""
A property that simply returns the name. Used to test #24461
"""
return self.name
@python_2_unicode_compatible
class Article(models.Model):
"""
A simple article to test admin views. Test backwards compatibility.
"""
title = models.CharField(max_length=100)
content = models.TextField()
date = models.DateTimeField()
section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True)
another_section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True, related_name='+')
sub_section = models.ForeignKey(Section, models.SET_NULL, null=True, blank=True, related_name='+')
def __str__(self):
return self.title
def model_year(self):
return self.date.year
model_year.admin_order_field = 'date'
model_year.short_description = ''
def model_year_reversed(self):
return self.date.year
model_year_reversed.admin_order_field = '-date'
model_year_reversed.short_description = ''
@python_2_unicode_compatible
class Book(models.Model):
"""
A simple book that has chapters.
"""
name = models.CharField(max_length=100, verbose_name='¿Name?')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Promo(models.Model):
name = models.CharField(max_length=100, verbose_name='¿Name?')
book = models.ForeignKey(Book, models.CASCADE)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Chapter(models.Model):
title = models.CharField(max_length=100, verbose_name='¿Title?')
content = models.TextField()
book = models.ForeignKey(Book, models.CASCADE)
def __str__(self):
return self.title
class Meta:
# Use a utf-8 bytestring to ensure it works (see #11710)
verbose_name = '¿Chapter?'
@python_2_unicode_compatible
class ChapterXtra1(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')
def __str__(self):
return '¿Xtra1: %s' % self.xtra
@python_2_unicode_compatible
class ChapterXtra2(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')
def __str__(self):
return '¿Xtra2: %s' % self.xtra
class RowLevelChangePermissionModel(models.Model):
name = models.CharField(max_length=100, blank=True)
class CustomArticle(models.Model):
content = models.TextField()
date = models.DateTimeField()
@python_2_unicode_compatible
class ModelWithStringPrimaryKey(models.Model):
string_pk = models.CharField(max_length=255, primary_key=True)
def __str__(self):
return self.string_pk
def get_absolute_url(self):
return '/dummy/%s/' % self.string_pk
@python_2_unicode_compatible
class Color(models.Model):
value = models.CharField(max_length=10)
warm = models.BooleanField(default=False)
def __str__(self):
return self.value
# we replicate Color to register with another ModelAdmin
class Color2(Color):
class Meta:
proxy = True
@python_2_unicode_compatible
class Thing(models.Model):
title = models.CharField(max_length=20)
color = models.ForeignKey(Color, models.CASCADE, limit_choices_to={'warm': True})
pub_date = models.DateField(blank=True, null=True)
def __str__(self):
return self.title
@python_2_unicode_compatible
class Actor(models.Model):
name = models.CharField(max_length=50)
age = models.IntegerField()
title = models.CharField(max_length=50, null=True, blank=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Inquisition(models.Model):
expected = models.BooleanField(default=False)
leader = models.ForeignKey(Actor, models.CASCADE)
country = models.CharField(max_length=20)
def __str__(self):
return "by %s from %s" % (self.leader, self.country)
@python_2_unicode_compatible
class Sketch(models.Model):
title = models.CharField(max_length=100)
inquisition = models.ForeignKey(
Inquisition,
models.CASCADE,
limit_choices_to={
'leader__name': 'Palin',
'leader__age': 27,
'expected': False,
},
)
defendant0 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={'title__isnull': False},
related_name='as_defendant0',
)
defendant1 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={'title__isnull': True},
related_name='as_defendant1',
)
def __str__(self):
return self.title
def today_callable_dict():
return {"last_action__gte": datetime.datetime.today()}
def today_callable_q():
return models.Q(last_action__gte=datetime.datetime.today())
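# These callables are passed as limit_choices_to below so that the filter is
# evaluated lazily (each time a form is rendered) rather than once at import.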
@python_2_unicode_compatible
class Character(models.Model):
username = models.CharField(max_length=100)
last_action = models.DateTimeField()
def __str__(self):
return self.username
@python_2_unicode_compatible
class StumpJoke(models.Model):
variation = models.CharField(max_length=100)
most_recently_fooled = models.ForeignKey(
Character,
models.CASCADE,
limit_choices_to=today_callable_dict,
related_name="+",
)
has_fooled_today = models.ManyToManyField(Character, limit_choices_to=today_callable_q, related_name="+")
def __str__(self):
return self.variation
class Fabric(models.Model):
NG_CHOICES = (
('Textured', (
('x', 'Horizontal'),
('y', 'Vertical'),
)),
('plain', 'Smooth'),
)
surface = models.CharField(max_length=20, choices=NG_CHOICES)
@python_2_unicode_compatible
class Person(models.Model):
GENDER_CHOICES = (
(1, "Male"),
(2, "Female"),
)
name = models.CharField(max_length=100)
gender = models.IntegerField(choices=GENDER_CHOICES)
age = models.IntegerField(default=21)
alive = models.BooleanField(default=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Persona(models.Model):
"""
A simple persona associated with accounts, to test inlining of related
accounts which inherit from a common accounts class.
"""
name = models.CharField(blank=False, max_length=80)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Account(models.Model):
"""
A simple, generic account encapsulating the information shared by all
types of accounts.
"""
username = models.CharField(blank=False, max_length=80)
persona = models.ForeignKey(Persona, models.CASCADE, related_name="accounts")
servicename = 'generic service'
def __str__(self):
return "%s: %s" % (self.servicename, self.username)
class FooAccount(Account):
"""A service-specific account of type Foo."""
servicename = 'foo'
class BarAccount(Account):
"""A service-specific account of type Bar."""
servicename = 'bar'
@python_2_unicode_compatible
class Subscriber(models.Model):
name = models.CharField(blank=False, max_length=80)
email = models.EmailField(blank=False, max_length=175)
def __str__(self):
return "%s (%s)" % (self.name, self.email)
class ExternalSubscriber(Subscriber):
pass
class OldSubscriber(Subscriber):
pass
class Media(models.Model):
name = models.CharField(max_length=60)
class Podcast(Media):
release_date = models.DateField()
class Meta:
ordering = ('release_date',) # overridden in PodcastAdmin
class Vodcast(Media):
media = models.OneToOneField(Media, models.CASCADE, primary_key=True, parent_link=True)
released = models.BooleanField(default=False)
class Parent(models.Model):
name = models.CharField(max_length=128)
def clean(self):
if self.name == '_invalid':
raise ValidationError('invalid')
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE, editable=False)
name = models.CharField(max_length=30, blank=True)
def clean(self):
if self.name == '_invalid':
raise ValidationError('invalid')
@python_2_unicode_compatible
class EmptyModel(models.Model):
def __str__(self):
return "Primary key = %s" % self.id
temp_storage = FileSystemStorage(tempfile.mkdtemp())
UPLOAD_TO = os.path.join(temp_storage.location, 'test_upload')
class Gallery(models.Model):
name = models.CharField(max_length=100)
class Picture(models.Model):
name = models.CharField(max_length=100)
image = models.FileField(storage=temp_storage, upload_to='test_upload')
gallery = models.ForeignKey(Gallery, models.CASCADE, related_name="pictures")
class Language(models.Model):
iso = models.CharField(max_length=5, primary_key=True)
name = models.CharField(max_length=50)
english_name = models.CharField(max_length=50)
shortlist = models.BooleanField(default=False)
class Meta:
ordering = ('iso',)
# a base class for Recommender and Recommendation
class Title(models.Model):
pass
class TitleTranslation(models.Model):
title = models.ForeignKey(Title, models.CASCADE)
text = models.CharField(max_length=100)
class Recommender(Title):
pass
class Recommendation(Title):
recommender = models.ForeignKey(Recommender, models.CASCADE)
class Collector(models.Model):
name = models.CharField(max_length=100)
class Widget(models.Model):
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class DooHickey(models.Model):
code = models.CharField(max_length=10, primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Grommet(models.Model):
code = models.AutoField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Whatsit(models.Model):
index = models.IntegerField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Doodad(models.Model):
name = models.CharField(max_length=100)
class FancyDoodad(Doodad):
owner = models.ForeignKey(Collector, models.CASCADE)
expensive = models.BooleanField(default=True)
@python_2_unicode_compatible
class Category(models.Model):
collector = models.ForeignKey(Collector, models.CASCADE)
order = models.PositiveIntegerField()
class Meta:
ordering = ('order',)
def __str__(self):
return '%s:o%s' % (self.id, self.order)
def link_posted_default():
return datetime.date.today() - datetime.timedelta(days=7)
class Link(models.Model):
posted = models.DateField(default=link_posted_default)
url = models.URLField()
post = models.ForeignKey("Post", models.CASCADE)
readonly_link_content = models.TextField()
class PrePopulatedPost(models.Model):
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
slug = models.SlugField()
class PrePopulatedSubPost(models.Model):
post = models.ForeignKey(PrePopulatedPost, models.CASCADE)
subtitle = models.CharField(max_length=100)
subslug = models.SlugField()
class Post(models.Model):
title = models.CharField(max_length=100, help_text="Some help text for the title (with unicode ŠĐĆŽćžšđ)")
content = models.TextField(help_text="Some help text for the content (with unicode ŠĐĆŽćžšđ)")
readonly_content = models.TextField()
posted = models.DateField(
default=datetime.date.today,
help_text="Some help text for the date (with unicode ŠĐĆŽćžšđ)"
)
public = models.NullBooleanField()
def awesomeness_level(self):
return "Very awesome."
# Proxy model to test overridden fields attrs on Post model so as not to
# interfere with other tests.
class FieldOverridePost(Post):
class Meta:
proxy = True
@python_2_unicode_compatible
class Gadget(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Villain(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class SuperVillain(Villain):
pass
@python_2_unicode_compatible
class FunkyTag(models.Model):
"Because we all know there's only one real use case for GFKs."
name = models.CharField(max_length=25)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
def __str__(self):
return self.name
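# Usage sketch (hypothetical data; not part of the original models): a
# FunkyTag attaches to any model through its (content_type, object_id) pair,
# e.g. tagging the Plot model defined below:
#
#   plot = Plot.objects.create(name='p', team_leader=v, contact=v)
#   FunkyTag.objects.create(name='hot', content_object=plot)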
@python_2_unicode_compatible
class Plot(models.Model):
name = models.CharField(max_length=100)
team_leader = models.ForeignKey(Villain, models.CASCADE, related_name='lead_plots')
contact = models.ForeignKey(Villain, models.CASCADE, related_name='contact_plots')
tags = GenericRelation(FunkyTag)
def __str__(self):
return self.name
@python_2_unicode_compatible
class PlotDetails(models.Model):
details = models.CharField(max_length=100)
plot = models.OneToOneField(Plot, models.CASCADE, null=True, blank=True)
def __str__(self):
return self.details
class PlotProxy(Plot):
class Meta:
proxy = True
@python_2_unicode_compatible
class SecretHideout(models.Model):
""" Secret! Not registered with the admin! """
location = models.CharField(max_length=100)
villain = models.ForeignKey(Villain, models.CASCADE)
def __str__(self):
return self.location
@python_2_unicode_compatible
class SuperSecretHideout(models.Model):
""" Secret! Not registered with the admin! """
location = models.CharField(max_length=100)
supervillain = models.ForeignKey(SuperVillain, models.CASCADE)
def __str__(self):
return self.location
@python_2_unicode_compatible
class Bookmark(models.Model):
name = models.CharField(max_length=60)
tag = GenericRelation(FunkyTag, related_query_name='bookmark')
def __str__(self):
return self.name
@python_2_unicode_compatible
class CyclicOne(models.Model):
name = models.CharField(max_length=25)
two = models.ForeignKey('CyclicTwo', models.CASCADE)
def __str__(self):
return self.name
@python_2_unicode_compatible
class CyclicTwo(models.Model):
name = models.CharField(max_length=25)
one = models.ForeignKey(CyclicOne, models.CASCADE)
def __str__(self):
return self.name
class Topping(models.Model):
name = models.CharField(max_length=20)
class Pizza(models.Model):
name = models.CharField(max_length=20)
toppings = models.ManyToManyField('Topping', related_name='pizzas')
class Album(models.Model):
owner = models.ForeignKey(User, models.SET_NULL, null=True, blank=True)
title = models.CharField(max_length=30)
class Employee(Person):
code = models.CharField(max_length=20)
class WorkHour(models.Model):
datum = models.DateField()
employee = models.ForeignKey(Employee, models.CASCADE)
class Question(models.Model):
question = models.CharField(max_length=20)
@python_2_unicode_compatible
class Answer(models.Model):
question = models.ForeignKey(Question, models.PROTECT)
answer = models.CharField(max_length=20)
def __str__(self):
return self.answer
class Reservation(models.Model):
start_date = models.DateTimeField()
price = models.IntegerField()
DRIVER_CHOICES = (
('bill', 'Bill G'),
('steve', 'Steve J'),
)
RESTAURANT_CHOICES = (
('indian', 'A Taste of India'),
('thai', 'Thai Pography'),
('pizza', 'Pizza Mama'),
)
class FoodDelivery(models.Model):
reference = models.CharField(max_length=100)
driver = models.CharField(max_length=100, choices=DRIVER_CHOICES, blank=True)
restaurant = models.CharField(max_length=100, choices=RESTAURANT_CHOICES, blank=True)
class Meta:
unique_together = (("driver", "restaurant"),)
@python_2_unicode_compatible
class CoverLetter(models.Model):
author = models.CharField(max_length=30)
date_written = models.DateField(null=True, blank=True)
def __str__(self):
return self.author
class Paper(models.Model):
title = models.CharField(max_length=30)
author = models.CharField(max_length=30, blank=True, null=True)
class ShortMessage(models.Model):
content = models.CharField(max_length=140)
timestamp = models.DateTimeField(null=True, blank=True)
@python_2_unicode_compatible
class Telegram(models.Model):
title = models.CharField(max_length=30)
date_sent = models.DateField(null=True, blank=True)
def __str__(self):
return self.title
class Story(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class OtherStory(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class ComplexSortedPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
is_employee = models.NullBooleanField()
class PluggableSearchPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
class PrePopulatedPostLargeSlug(models.Model):
"""
Regression test for #15938: a large max_length for the slugfield must not
be localized in prepopulated_fields_js.html or it might end up breaking
the javascript (ie, using THOUSAND_SEPARATOR ends up with maxLength=1,000)
"""
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
# `db_index=False` because MySQL cannot index large CharField (#21196).
slug = models.SlugField(max_length=1000, db_index=False)
class AdminOrderedField(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedModelMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
def some_order(self):
return self.order
some_order.admin_order_field = 'order'
class AdminOrderedAdminMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedCallable(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
@python_2_unicode_compatible
class Report(models.Model):
title = models.CharField(max_length=100)
def __str__(self):
return self.title
class MainPrepopulated(models.Model):
name = models.CharField(max_length=100)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(('option one', 'Option One'),
('option two', 'Option Two')))
slug1 = models.SlugField(blank=True)
slug2 = models.SlugField(blank=True)
slug3 = models.SlugField(blank=True, allow_unicode=True)
class RelatedPrepopulated(models.Model):
parent = models.ForeignKey(MainPrepopulated, models.CASCADE)
name = models.CharField(max_length=75)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(('option one', 'Option One'),
('option two', 'Option Two')))
slug1 = models.SlugField(max_length=50)
slug2 = models.SlugField(max_length=60)
class UnorderedObject(models.Model):
"""
Model without any defined `Meta.ordering`.
Refs #16819.
"""
name = models.CharField(max_length=255)
bool = models.BooleanField(default=True)
class UndeletableObject(models.Model):
"""
Model whose show_delete in admin change_view has been disabled
Refs #10057.
"""
name = models.CharField(max_length=255)
class UnchangeableObject(models.Model):
"""
Model whose change_view is disabled in admin
Refs #20640.
"""
class UserMessenger(models.Model):
"""
Dummy class for testing message_user functions on ModelAdmin
"""
class Simple(models.Model):
"""
Simple model with nothing on it for use in testing
"""
class Choice(models.Model):
choice = models.IntegerField(
blank=True, null=True,
choices=((1, 'Yes'), (0, 'No'), (None, 'No opinion')),
)
class ParentWithDependentChildren(models.Model):
"""
Issue #20522
Model where the validation of child foreign-key relationships depends
on validation of the parent
"""
some_required_info = models.PositiveIntegerField()
family_name = models.CharField(max_length=255, blank=False)
class DependentChild(models.Model):
"""
Issue #20522
Model that depends on validation of the parent class for one of its
fields to validate during clean
"""
parent = models.ForeignKey(ParentWithDependentChildren, models.CASCADE)
family_name = models.CharField(max_length=255)
class _Manager(models.Manager):
def get_queryset(self):
return super(_Manager, self).get_queryset().filter(pk__gt=1)
class FilteredManager(models.Model):
def __str__(self):
return "PK=%d" % self.pk
pk_gt_1 = _Manager()
objects = models.Manager()
class EmptyModelVisible(models.Model):
""" See ticket #11277. """
class EmptyModelHidden(models.Model):
""" See ticket #11277. """
class EmptyModelMixin(models.Model):
""" See ticket #11277. """
class State(models.Model):
name = models.CharField(max_length=100)
class City(models.Model):
state = models.ForeignKey(State, models.CASCADE)
name = models.CharField(max_length=100)
def get_absolute_url(self):
return '/dummy/%s/' % self.pk
class Restaurant(models.Model):
city = models.ForeignKey(City, models.CASCADE)
name = models.CharField(max_length=100)
def get_absolute_url(self):
return '/dummy/%s/' % self.pk
class Worker(models.Model):
work_at = models.ForeignKey(Restaurant, models.CASCADE)
name = models.CharField(max_length=50)
surname = models.CharField(max_length=50)
# Models for #23329
class ReferencedByParent(models.Model):
name = models.CharField(max_length=20, unique=True)
class ParentWithFK(models.Model):
fk = models.ForeignKey(
ReferencedByParent,
models.CASCADE,
to_field='name',
related_name='hidden+',
)
class ChildOfReferer(ParentWithFK):
pass
# Models for #23431
class ReferencedByInline(models.Model):
name = models.CharField(max_length=20, unique=True)
class InlineReference(models.Model):
fk = models.ForeignKey(
ReferencedByInline,
models.CASCADE,
to_field='name',
related_name='hidden+',
)
class InlineReferer(models.Model):
refs = models.ManyToManyField(InlineReference)
# Models for #23604 and #23915
class Recipe(models.Model):
rname = models.CharField(max_length=20, unique=True)
class Ingredient(models.Model):
iname = models.CharField(max_length=20, unique=True)
recipes = models.ManyToManyField(Recipe, through='RecipeIngredient')
class RecipeIngredient(models.Model):
ingredient = models.ForeignKey(Ingredient, models.CASCADE, to_field='iname')
recipe = models.ForeignKey(Recipe, models.CASCADE, to_field='rname')
# Model for #23839
class NotReferenced(models.Model):
# Don't point any FK at this model.
pass
# Models for #23934
class ExplicitlyProvidedPK(models.Model):
name = models.IntegerField(primary_key=True)
class ImplicitlyGeneratedPK(models.Model):
name = models.IntegerField(unique=True)
# Models for #25622
class ReferencedByGenRel(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
class GenRelReference(models.Model):
references = GenericRelation(ReferencedByGenRel)
class ParentWithUUIDPK(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
title = models.CharField(max_length=100)
def __str__(self):
return str(self.id)
class RelatedWithUUIDPKModel(models.Model):
parent = models.ForeignKey(ParentWithUUIDPK, on_delete=models.SET_NULL, null=True, blank=True)
| bsd-3-clause | -4,147,450,470,049,681,400 | 24.848671 | 110 | 0.687342 | false |
SoftwareKing/zstack-woodpecker | integrationtest/vm/multihosts/volumes/test_volumes_robot_2h_wfairly.py | 4 | 2395 | '''
Robot testing for volume operations, running for 2 hours. Uses the
weighted-fair strategy.
@author: Youyk
'''
import zstackwoodpecker.action_select as action_select
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.header.vm as vm_header
import time
_config_ = {
'timeout' : 9000,
'noparallel' : False
}
test_stub = test_lib.lib_get_test_stub()
test_dict = test_state.TestStateDict()
def test():
test_util.test_dsc('''
    Performs random test operations, including VM create/stop/start/reboot/
    destroy and volume create/attach/detach/delete. It does not include SG,
    VIP, or snapshot operations. If the maximum of 4 coexisting running VMs
    is reached, the test succeeds and quits.
''')
target_running_vm = 4
    test_util.test_dsc('Random test begins. Test target: 4 coexisting running VMs (not including VR).')
robot_test_obj = test_util.Robot_Test_Object()
robot_test_obj.set_test_dict(test_dict)
robot_test_obj.set_exclusive_actions_list(\
test_state.TestAction.sg_actions \
+ test_state.TestAction.vip_actions \
+ test_state.TestAction.snapshot_actions)
priority_actions = test_state.TestAction.volume_actions * 2
priority_action_obj = action_select.ActionPriority()
priority_action_obj.add_priority_action_list(priority_actions)
robot_test_obj.set_priority_actions(priority_action_obj)
robot_test_obj.set_random_type(action_select.weight_fair_strategy)
rounds = 1
current_time = time.time()
timeout_time = current_time + 7200
while time.time() <= timeout_time:
print "test_dict: %s" % test_dict
test_util.test_dsc('New round %s starts: random operation pickup.' % rounds)
test_lib.lib_vm_random_operation(robot_test_obj)
test_util.test_dsc('Round %s finished. Begin status checking.' % rounds)
rounds += 1
test_lib.lib_robot_status_check(test_dict)
test_util.test_dsc('Reach test pass exit criterial.')
test_lib.lib_robot_cleanup(test_dict)
test_util.test_pass('Create random VM Test Success')
# Will be called only if an exception happens in test().
def error_cleanup():
test_lib.lib_error_cleanup(test_dict)
| apache-2.0 | -5,637,509,671,469,078,000 | 36.629032 | 99 | 0.676409 | false |
poojavade/Genomics_Docker | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/ipython-2.2.0-py2.7.egg/IPython/qt/rich_text.py | 12 | 8785 | """ Defines classes and functions for working with Qt's rich text system.
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import io
import os
import re
# System library imports
from IPython.external.qt import QtGui
# IPython imports
from IPython.utils import py3compat
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
# A regular expression for an HTML paragraph with no content.
EMPTY_P_RE = re.compile(r'<p[^/>]*>\s*</p>')
# A regular expression for matching images in rich text HTML.
# Note that this is overly restrictive, but Qt's output is predictable...
IMG_RE = re.compile(r'<img src="(?P<name>[\d]+)" />')
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class HtmlExporter(object):
""" A stateful HTML exporter for a Q(Plain)TextEdit.
This class is designed for convenient user interaction.
"""
def __init__(self, control):
""" Creates an HtmlExporter for the given Q(Plain)TextEdit.
"""
assert isinstance(control, (QtGui.QPlainTextEdit, QtGui.QTextEdit))
self.control = control
self.filename = 'ipython.html'
self.image_tag = None
self.inline_png = None
def export(self):
""" Displays a dialog for exporting HTML generated by Qt's rich text
system.
Returns
-------
The name of the file that was saved, or None if no file was saved.
"""
parent = self.control.window()
dialog = QtGui.QFileDialog(parent, 'Save as...')
dialog.setAcceptMode(QtGui.QFileDialog.AcceptSave)
filters = [
'HTML with PNG figures (*.html *.htm)',
'XHTML with inline SVG figures (*.xhtml *.xml)'
]
dialog.setNameFilters(filters)
if self.filename:
dialog.selectFile(self.filename)
root,ext = os.path.splitext(self.filename)
if ext.lower() in ('.xml', '.xhtml'):
dialog.selectNameFilter(filters[-1])
if dialog.exec_():
self.filename = dialog.selectedFiles()[0]
choice = dialog.selectedNameFilter()
html = py3compat.cast_unicode(self.control.document().toHtml())
# Configure the exporter.
if choice.startswith('XHTML'):
exporter = export_xhtml
else:
# If there are PNGs, decide how to export them.
inline = self.inline_png
if inline is None and IMG_RE.search(html):
dialog = QtGui.QDialog(parent)
dialog.setWindowTitle('Save as...')
layout = QtGui.QVBoxLayout(dialog)
msg = "Exporting HTML with PNGs"
info = "Would you like inline PNGs (single large html " \
"file) or external image files?"
checkbox = QtGui.QCheckBox("&Don't ask again")
checkbox.setShortcut('D')
ib = QtGui.QPushButton("&Inline")
ib.setShortcut('I')
eb = QtGui.QPushButton("&External")
eb.setShortcut('E')
box = QtGui.QMessageBox(QtGui.QMessageBox.Question,
dialog.windowTitle(), msg)
box.setInformativeText(info)
box.addButton(ib, QtGui.QMessageBox.NoRole)
box.addButton(eb, QtGui.QMessageBox.YesRole)
layout.setSpacing(0)
layout.addWidget(box)
layout.addWidget(checkbox)
dialog.setLayout(layout)
dialog.show()
reply = box.exec_()
dialog.hide()
inline = (reply == 0)
if checkbox.checkState():
# Don't ask anymore; always use this choice.
self.inline_png = inline
exporter = lambda h, f, i: export_html(h, f, i, inline)
# Perform the export!
try:
return exporter(html, self.filename, self.image_tag)
except Exception as e:
msg = "Error exporting HTML to %s\n" % self.filename + str(e)
reply = QtGui.QMessageBox.warning(parent, 'Error', msg,
QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
return None
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
def export_html(html, filename, image_tag = None, inline = True):
""" Export the contents of the ConsoleWidget as HTML.
Parameters
----------
html : unicode,
A Python unicode string containing the Qt HTML to export.
filename : str
The file to be saved.
image_tag : callable, optional (default None)
Used to convert images. See ``default_image_tag()`` for information.
inline : bool, optional [default True]
If True, include images as inline PNGs. Otherwise, include them as
links to external PNG files, mimicking web browsers' "Web Page,
Complete" behavior.
"""
if image_tag is None:
image_tag = default_image_tag
if inline:
path = None
else:
root,ext = os.path.splitext(filename)
path = root + "_files"
if os.path.isfile(path):
raise OSError("%s exists, but is not a directory." % path)
with io.open(filename, 'w', encoding='utf-8') as f:
html = fix_html(html)
f.write(IMG_RE.sub(lambda x: image_tag(x, path = path, format = "png"),
html))
def export_xhtml(html, filename, image_tag=None):
""" Export the contents of the ConsoleWidget as XHTML with inline SVGs.
Parameters
----------
html : unicode,
A Python unicode string containing the Qt HTML to export.
filename : str
The file to be saved.
image_tag : callable, optional (default None)
Used to convert images. See ``default_image_tag()`` for information.
"""
if image_tag is None:
image_tag = default_image_tag
with io.open(filename, 'w', encoding='utf-8') as f:
# Hack to make xhtml header -- note that we are not doing any check for
# valid XML.
offset = html.find("<html>")
assert offset > -1, 'Invalid HTML string: no <html> tag.'
html = (u'<html xmlns="http://www.w3.org/1999/xhtml">\n'+
html[offset+6:])
html = fix_html(html)
f.write(IMG_RE.sub(lambda x: image_tag(x, path = None, format = "svg"),
html))
def default_image_tag(match, path = None, format = "png"):
""" Return (X)HTML mark-up for the image-tag given by match.
This default implementation merely removes the image, and exists mostly
for documentation purposes. More information than is present in the Qt
HTML is required to supply the images.
Parameters
----------
match : re.SRE_Match
A match to an HTML image tag as exported by Qt, with match.group("Name")
containing the matched image ID.
path : string|None, optional [default None]
If not None, specifies a path to which supporting files may be written
(e.g., for linked images). If None, all images are to be included
inline.
format : "png"|"svg", optional [default "png"]
Format for returned or referenced images.
"""
return u''
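# Sketch of a custom image_tag callable (hypothetical; follows the protocol
# documented above): drop the image data but leave a visible placeholder.
def _example_placeholder_image_tag(match, path=None, format="png"):
    return u'<em>[image %s omitted]</em>' % match.group('name')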
def fix_html(html):
""" Transforms a Qt-generated HTML string into a standards-compliant one.
Parameters
----------
html : unicode,
A Python unicode string containing the Qt HTML.
"""
# A UTF-8 declaration is needed for proper rendering of some characters
# (e.g., indented commands) when viewing exported HTML on a local system
# (i.e., without seeing an encoding declaration in an HTTP header).
# C.f. http://www.w3.org/International/O-charset for details.
offset = html.find('<head>')
if offset > -1:
html = (html[:offset+6]+
'\n<meta http-equiv="Content-Type" '+
'content="text/html; charset=utf-8" />\n'+
html[offset+6:])
    # Replace empty paragraph tags with line breaks.
html = re.sub(EMPTY_P_RE, '<br/>', html)
return html
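# Illustrative sketch (hypothetical input; not part of the original module):
# fix_html() injects a UTF-8 declaration after <head> and turns Qt's empty
# paragraphs into <br/>.
def _example_fix_html():
    fixed = fix_html(u'<html><head></head><p style="-qt-block">x</p><p></p></html>')
    assert 'charset=utf-8' in fixed
    assert '<br/>' in fixed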
| apache-2.0 | 1,153,889,604,806,744,800 | 35.911765 | 80 | 0.533637 | false |
jgcaaprom/android_external_chromium_org | tools/telemetry/telemetry/page/actions/scroll.py | 45 | 3747 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.page.actions import page_action
class ScrollAction(page_action.PageAction):
# TODO(chrishenry): Ignore attributes, to be deleted when usage in
# other repo is cleaned up.
def __init__(self, selector=None, text=None, element_function=None,
left_start_ratio=0.5, top_start_ratio=0.5, direction='down',
distance=None, distance_expr=None,
speed_in_pixels_per_second=800, use_touch=False):
super(ScrollAction, self).__init__()
if direction not in ['down', 'up', 'left', 'right']:
raise page_action.PageActionNotSupported(
          'Invalid scroll direction: %s' % direction)
self._selector = selector
self._text = text
self._element_function = element_function
self._left_start_ratio = left_start_ratio
self._top_start_ratio = top_start_ratio
self._direction = direction
self._speed = speed_in_pixels_per_second
self._use_touch = use_touch
self._distance_func = 'null'
if distance:
assert not distance_expr
distance_expr = str(distance)
if distance_expr:
self._distance_func = ('function() { return 0 + %s; }' %
distance_expr)
def WillRunAction(self, tab):
for js_file in ['gesture_common.js', 'scroll.js']:
with open(os.path.join(os.path.dirname(__file__), js_file)) as f:
js = f.read()
tab.ExecuteJavaScript(js)
# Fail if browser doesn't support synthetic scroll gestures.
if not tab.EvaluateJavaScript('window.__ScrollAction_SupportedByBrowser()'):
raise page_action.PageActionNotSupported(
'Synthetic scroll not supported for this browser')
# Fail if this action requires touch and we can't send touch events.
if self._use_touch:
if not page_action.IsGestureSourceTypeSupported(tab, 'touch'):
raise page_action.PageActionNotSupported(
'Touch scroll not supported for this browser')
if (page_action.GetGestureSourceTypeFromOptions(tab) ==
'chrome.gpuBenchmarking.MOUSE_INPUT'):
raise page_action.PageActionNotSupported(
'Scroll requires touch on this page but mouse input was requested')
done_callback = 'function() { window.__scrollActionDone = true; }'
tab.ExecuteJavaScript("""
window.__scrollActionDone = false;
window.__scrollAction = new __ScrollAction(%s, %s);"""
% (done_callback, self._distance_func))
def RunAction(self, tab):
if (self._selector is None and self._text is None and
self._element_function is None):
self._element_function = 'document.body'
gesture_source_type = page_action.GetGestureSourceTypeFromOptions(tab)
if self._use_touch:
gesture_source_type = 'chrome.gpuBenchmarking.TOUCH_INPUT'
code = '''
function(element, info) {
if (!element) {
throw Error('Cannot find element: ' + info);
}
window.__scrollAction.start({
element: element,
left_start_ratio: %s,
top_start_ratio: %s,
direction: '%s',
speed: %s,
gesture_source_type: %s
});
}''' % (self._left_start_ratio,
self._top_start_ratio,
self._direction,
self._speed,
gesture_source_type)
page_action.EvaluateCallbackWithElement(
tab, code, selector=self._selector, text=self._text,
element_function=self._element_function)
tab.WaitForJavaScriptExpression('window.__scrollActionDone', 60)
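# Usage sketch (hypothetical tab object; not part of the original action):
# scroll the document body down 500 pixels using synthetic touch input.
def _ExampleScrollBodyDown(tab):
  action = ScrollAction(element_function='document.body', direction='down',
                        distance=500, use_touch=True)
  action.WillRunAction(tab)
  action.RunAction(tab)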
| bsd-3-clause | 1,665,767,130,861,873,700 | 38.442105 | 80 | 0.634641 | false |
mclaughlin6464/pylearn2 | pylearn2/sandbox/rnn/utils/iteration.py | 34 | 5669 | """
Iterator for RNN data
"""
from functools import wraps
import numpy as np
from theano import config
from pylearn2.sandbox.rnn.space import SequenceDataSpace
from pylearn2.sandbox.rnn.space import SequenceMaskSpace
from pylearn2.space import CompositeSpace
from pylearn2.utils import safe_izip
from pylearn2.utils.iteration import FiniteDatasetIterator
class SequenceDatasetIterator(FiniteDatasetIterator):
"""
Assumes space is a CompositeSpace and source is a tuple.
Parameters
----------
dataset : `Dataset` object
The dataset over which to iterate.
data_specs : tuple
A `(space, source)` tuple. See :ref:`data_specs` for a full
description. Must not contain nested composite spaces.
subset_iterator : object
An iterator object that returns slice objects or lists of
examples, conforming to the interface specified by
:py:class:`SubsetIterator`.
return_tuple : bool, optional
Always return a tuple, even if there is exactly one source
of data being returned. Defaults to `False`.
convert : list of callables
A list of callables, in the same order as the sources
in `data_specs`, that will be called on the individual
source batches prior to any further processing.
Notes
-----
See the documentation for :py:class:`SubsetIterator` for
attribute documentation.
"""
def __init__(self, dataset, data_specs, subset_iterator,
return_tuple=False, convert=None):
# Unpack the data specs into two tuples
space, source = data_specs
if not isinstance(source, tuple):
source = (source,)
# Remove the requested mask from the data specs before calling
# the parent constructor
self._original_source = source
mask_seen, sequence_seen = False, False
self.mask_needed = []
retain = []
for i, (subspace, subsource) in enumerate(safe_izip(space.components,
source)):
if isinstance(subspace, SequenceMaskSpace):
if not subsource.endswith('_mask') or \
subsource[:-5] not in source:
raise ValueError("SequenceDatasetIterator received "
"data_specs containing a "
"SequenceMaskSpace with corresponding "
"source %s, but the source should end "
"with `_mask` in order to match it to the"
"correct SequenceDataSpace")
mask_seen = True
self.mask_needed.append(subsource[:-5])
else:
retain.append(i)
if isinstance(subspace, SequenceDataSpace):
sequence_seen = True
if mask_seen != sequence_seen and i + 1 != len(retain):
raise ValueError("SequenceDatasetIterator was asked to iterate "
"over a sequence mask without data or vice versa")
space = space.restrict(retain)
source = tuple(source[i] for i in retain)
super(SequenceDatasetIterator, self).__init__(
dataset, subset_iterator, (space, source),
return_tuple=return_tuple, convert=convert
)
if not isinstance(space, CompositeSpace):
space = (space,)
else:
space = space.components
assert len(space) == len(source)
self._original_space = space
def __iter__(self):
return self
def _create_mask(self, data):
"""
Creates the mask for a given set of data.
Parameters
----------
data : numpy sequence of ndarrays
A sequence of ndarrays representing sequential data
"""
sequence_lengths = [len(sample) for sample in data]
max_sequence_length = max(sequence_lengths)
mask = np.zeros((max_sequence_length, len(data)), dtype=config.floatX)
for i, sequence_length in enumerate(sequence_lengths):
mask[:sequence_length, i] = 1
return mask
@wraps(FiniteDatasetIterator.next)
def next(self):
next_index = self._subset_iterator.next()
rvals = []
for space, source, data, fn in safe_izip(self._space, self._source,
self._raw_data,
self._convert):
rval = data[next_index]
if isinstance(space, SequenceDataSpace):
# Add padding
max_sequence_length = max(len(sample) for sample
in rval)
batch = np.zeros((len(rval), max_sequence_length) +
data[0].shape[1:], dtype=data[0].dtype)
for i, sample in enumerate(rval):
batch[i, :len(sample)] = sample
# Create mask
if source in self.mask_needed:
mask = self._create_mask(rval)
rval = np.swapaxes(batch, 0, 1)
if fn:
rval = fn(rval)
rvals.append(rval)
if source in self.mask_needed:
rvals.append(mask)
else:
if fn:
rval = fn(rval)
rvals.append(rval)
# Reorder according to given data specs
        if not self._return_tuple and len(rvals) == 1:
rvals, = rvals
return tuple(rvals)
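def _example_sequence_mask():
    """
    Hypothetical sketch (not part of the original module): the mask layout
    for a batch of two sequences with lengths 3 and 5 is a
    (max_sequence_length, batch_size) = (5, 2) matrix, where
    mask[t, i] == 1 iff timestep t of sample i is real data -- the same
    layout _create_mask() produces above.
    """
    lengths = [3, 5]
    mask = np.zeros((max(lengths), len(lengths)), dtype=config.floatX)
    for i, length in enumerate(lengths):
        mask[:length, i] = 1
    return mask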
| bsd-3-clause | 5,495,454,967,043,676,000 | 38.368056 | 79 | 0.553008 | false |
susansalkeld/discsongs | discsongs/lib/python2.7/site-packages/setuptools/package_index.py | 258 | 38941 | """PyPI and direct package downloading"""
import sys
import os
import re
import shutil
import socket
import base64
import hashlib
from functools import wraps
from pkg_resources import (
CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
require, Environment, find_distributions, safe_name, safe_version,
to_filename, Requirement, DEVELOP_DIST,
)
from setuptools import ssl_support
from distutils import log
from distutils.errors import DistutilsError
from setuptools.compat import (urllib2, httplib, StringIO, HTTPError,
urlparse, urlunparse, unquote, splituser,
url2pathname, name2codepoint,
unichr, urljoin, urlsplit, urlunsplit,
ConfigParser)
from setuptools.compat import filterfalse
from fnmatch import translate
from setuptools.py26compat import strip_fragment
from setuptools.py27compat import get_all_headers
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
# this is here to fix emacs' cruddy broken syntax highlighting
PYPI_MD5 = re.compile(
'<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)'
)
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
__all__ = [
'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
'interpret_distro_name',
]
_SOCKET_TIMEOUT = 15
def parse_bdist_wininst(name):
"""Return (base,pyversion) or (None,None) for possible .exe name"""
lower = name.lower()
base, py_ver, plat = None, None, None
if lower.endswith('.exe'):
if lower.endswith('.win32.exe'):
base = name[:-10]
plat = 'win32'
elif lower.startswith('.win32-py',-16):
py_ver = name[-7:-4]
base = name[:-16]
plat = 'win32'
elif lower.endswith('.win-amd64.exe'):
base = name[:-14]
plat = 'win-amd64'
elif lower.startswith('.win-amd64-py',-20):
py_ver = name[-7:-4]
base = name[:-20]
plat = 'win-amd64'
return base,py_ver,plat
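# Illustrative self-check (hypothetical filenames; not part of setuptools):
# shows how bdist_wininst archive names split into base, Python version and
# platform, and that non-.exe names fall through untouched.
def _example_parse_bdist_wininst():
    assert parse_bdist_wininst('foo-1.0.win32.exe') == ('foo-1.0', None, 'win32')
    assert parse_bdist_wininst('foo-1.0.win32-py2.7.exe') == ('foo-1.0', '2.7', 'win32')
    assert parse_bdist_wininst('foo-1.0.win-amd64-py2.7.exe') == (
        'foo-1.0', '2.7', 'win-amd64')
    assert parse_bdist_wininst('foo-1.0.tar.gz') == (None, None, None)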
def egg_info_for_url(url):
scheme, server, path, parameters, query, fragment = urlparse(url)
base = unquote(path.split('/')[-1])
if server=='sourceforge.net' and base=='download': # XXX Yuck
base = unquote(path.split('/')[-2])
if '#' in base: base, fragment = base.split('#',1)
return base,fragment
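# Illustrative sketch (hypothetical URL; not part of setuptools): the download
# basename and the fragment are split apart so the fragment can carry
# '#egg=...' or '#md5=...' hints.
def _example_egg_info_for_url():
    url = 'https://example.com/dist/foo-1.0.tar.gz#md5=d41d8cd98f00b204e9800998ecf8427e'
    assert egg_info_for_url(url) == (
        'foo-1.0.tar.gz', 'md5=d41d8cd98f00b204e9800998ecf8427e')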
def distros_for_url(url, metadata=None):
"""Yield egg or source distribution objects that might be found at a URL"""
base, fragment = egg_info_for_url(url)
for dist in distros_for_location(url, base, metadata): yield dist
if fragment:
match = EGG_FRAGMENT.match(fragment)
if match:
for dist in interpret_distro_name(
url, match.group(1), metadata, precedence = CHECKOUT_DIST
):
yield dist
def distros_for_location(location, basename, metadata=None):
"""Yield egg or source distribution objects based on basename"""
if basename.endswith('.egg.zip'):
basename = basename[:-4] # strip the .zip
if basename.endswith('.egg') and '-' in basename:
# only one, unambiguous interpretation
return [Distribution.from_location(location, basename, metadata)]
if basename.endswith('.exe'):
win_base, py_ver, platform = parse_bdist_wininst(basename)
if win_base is not None:
return interpret_distro_name(
location, win_base, metadata, py_ver, BINARY_DIST, platform
)
# Try source distro extensions (.zip, .tgz, etc.)
#
for ext in EXTENSIONS:
if basename.endswith(ext):
basename = basename[:-len(ext)]
return interpret_distro_name(location, basename, metadata)
return [] # no extension matched
def distros_for_filename(filename, metadata=None):
"""Yield possible egg or source distribution objects based on a filename"""
return distros_for_location(
normalize_path(filename), os.path.basename(filename), metadata
)
def interpret_distro_name(
location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
platform=None
):
"""Generate alternative interpretations of a source distro name
Note: if `location` is a filesystem filename, you should call
``pkg_resources.normalize_path()`` on it before passing it to this
routine!
"""
# Generate alternative interpretations of a source distro name
    # Because some packages are ambiguous as to the name/version split,
    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
    # So, we generate each possible interpretation (e.g. "adns, python-1.1.0",
    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
# the spurious interpretations should be ignored, because in the event
# there's also an "adns" package, the spurious "python-1.1.0" version will
# compare lower than any numeric version number, and is therefore unlikely
# to match a request for it. It's still a potential problem, though, and
# in the long run PyPI and the distutils should go for "safe" names and
# versions in distribution archive names (sdist and bdist).
parts = basename.split('-')
if not py_version:
for i,p in enumerate(parts[2:]):
if len(p)==5 and p.startswith('py2.'):
return # It's a bdist_dumb, not an sdist -- bail out
for p in range(1,len(parts)+1):
yield Distribution(
location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
py_version=py_version, precedence = precedence,
platform = platform
)
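# Worked example (hypothetical basename): interpret_distro_name() yields every
# possible name/version split of 'adns-python-1.1.0', i.e. distributions for
#   ('adns', 'python-1.1.0'), ('adns-python', '1.1.0'),
#   and ('adns-python-1.1.0', ''),
# and, as noted above, the spurious splits compare lower than any real version
# number, so they are effectively ignored when matching requirements.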
# From Python 2.7 docs
def unique_everseen(iterable, key=None):
"List unique elements, preserving order. Remember all elements ever seen."
# unique_everseen('AAAABBBCCDAABBB') --> A B C D
# unique_everseen('ABBCcAD', str.lower) --> A B C D
seen = set()
seen_add = seen.add
if key is None:
for element in filterfalse(seen.__contains__, iterable):
seen_add(element)
yield element
else:
for element in iterable:
k = key(element)
if k not in seen:
seen_add(k)
yield element
def unique_values(func):
"""
Wrap a function returning an iterable such that the resulting iterable
only ever yields unique items.
"""
@wraps(func)
def wrapper(*args, **kwargs):
return unique_everseen(func(*args, **kwargs))
return wrapper
REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
# this line is here to fix emacs' cruddy broken syntax highlighting
@unique_values
def find_external_links(url, page):
"""Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
for match in REL.finditer(page):
tag, rel = match.groups()
rels = set(map(str.strip, rel.lower().split(',')))
if 'homepage' in rels or 'download' in rels:
for match in HREF.finditer(tag):
yield urljoin(url, htmldecode(match.group(1)))
for tag in ("<th>Home Page", "<th>Download URL"):
pos = page.find(tag)
if pos!=-1:
match = HREF.search(page,pos)
if match:
yield urljoin(url, htmldecode(match.group(1)))
user_agent = "Python-urllib/%s setuptools/%s" % (
sys.version[:3], require('setuptools')[0].version
)
class ContentChecker(object):
"""
A null content checker that defines the interface for checking content
"""
def feed(self, block):
"""
Feed a block of data to the hash.
"""
return
def is_valid(self):
"""
Check the hash. Return False if validation fails.
"""
return True
def report(self, reporter, template):
"""
Call reporter with information about the checker (hash name)
substituted into the template.
"""
return
class HashChecker(ContentChecker):
pattern = re.compile(
r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
r'(?P<expected>[a-f0-9]+)'
)
def __init__(self, hash_name, expected):
self.hash_name = hash_name
self.hash = hashlib.new(hash_name)
self.expected = expected
@classmethod
def from_url(cls, url):
"Construct a (possibly null) ContentChecker from a URL"
fragment = urlparse(url)[-1]
if not fragment:
return ContentChecker()
match = cls.pattern.search(fragment)
if not match:
return ContentChecker()
return cls(**match.groupdict())
def feed(self, block):
self.hash.update(block)
def is_valid(self):
return self.hash.hexdigest() == self.expected
def report(self, reporter, template):
msg = template % self.hash_name
return reporter(msg)
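# Usage sketch (hypothetical digest; not part of setuptools): a '#md5=...'
# fragment on a download URL yields a checker that validates the payload as
# bytes are fed in. The digest below is the md5 of the empty string.
def _example_hash_checker():
    checker = HashChecker.from_url(
        'http://example.com/foo-1.0.tar.gz#md5=d41d8cd98f00b204e9800998ecf8427e')
    checker.feed(b'')
    assert checker.is_valid()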
class PackageIndex(Environment):
"""A distribution index that scans web pages for download URLs"""
def __init__(
self, index_url="https://pypi.python.org/simple", hosts=('*',),
ca_bundle=None, verify_ssl=True, *args, **kw
):
Environment.__init__(self,*args,**kw)
self.index_url = index_url + "/"[:not index_url.endswith('/')]
self.scanned_urls = {}
self.fetched_urls = {}
self.package_pages = {}
self.allows = re.compile('|'.join(map(translate,hosts))).match
self.to_scan = []
if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()):
self.opener = ssl_support.opener_for(ca_bundle)
else: self.opener = urllib2.urlopen
def process_url(self, url, retrieve=False):
"""Evaluate a URL as a possible download, and maybe retrieve it"""
if url in self.scanned_urls and not retrieve:
return
self.scanned_urls[url] = True
if not URL_SCHEME(url):
self.process_filename(url)
return
else:
dists = list(distros_for_url(url))
if dists:
if not self.url_ok(url):
return
self.debug("Found link: %s", url)
if dists or not retrieve or url in self.fetched_urls:
list(map(self.add, dists))
return # don't need the actual page
if not self.url_ok(url):
self.fetched_urls[url] = True
return
self.info("Reading %s", url)
self.fetched_urls[url] = True # prevent multiple fetch attempts
f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url)
if f is None: return
self.fetched_urls[f.url] = True
if 'html' not in f.headers.get('content-type', '').lower():
f.close() # not html, we can't process it
return
base = f.url # handle redirects
page = f.read()
if not isinstance(page, str): # We are in Python 3 and got bytes. We want str.
if isinstance(f, HTTPError):
# Errors have no charset, assume latin1:
charset = 'latin-1'
else:
charset = f.headers.get_param('charset') or 'latin-1'
page = page.decode(charset, "ignore")
f.close()
for match in HREF.finditer(page):
link = urljoin(base, htmldecode(match.group(1)))
self.process_url(link)
if url.startswith(self.index_url) and getattr(f,'code',None)!=404:
page = self.process_index(url, page)
def process_filename(self, fn, nested=False):
# process filenames or directories
if not os.path.exists(fn):
self.warn("Not found: %s", fn)
return
if os.path.isdir(fn) and not nested:
path = os.path.realpath(fn)
for item in os.listdir(path):
self.process_filename(os.path.join(path,item), True)
dists = distros_for_filename(fn)
if dists:
self.debug("Found: %s", fn)
list(map(self.add, dists))
def url_ok(self, url, fatal=False):
s = URL_SCHEME(url)
if (s and s.group(1).lower()=='file') or self.allows(urlparse(url)[1]):
return True
msg = ("\nNote: Bypassing %s (disallowed host; see "
"http://bit.ly/1dg9ijs for details).\n")
if fatal:
raise DistutilsError(msg % url)
else:
self.warn(msg, url)
def scan_egg_links(self, search_path):
for item in search_path:
if os.path.isdir(item):
for entry in os.listdir(item):
if entry.endswith('.egg-link'):
self.scan_egg_link(item, entry)
def scan_egg_link(self, path, entry):
lines = [_f for _f in map(str.strip,
open(os.path.join(path, entry))) if _f]
if len(lines)==2:
for dist in find_distributions(os.path.join(path, lines[0])):
dist.location = os.path.join(path, *lines)
dist.precedence = SOURCE_DIST
self.add(dist)
def process_index(self,url,page):
"""Process the contents of a PyPI page"""
def scan(link):
# Process a URL to see if it's for a package page
if link.startswith(self.index_url):
parts = list(map(
unquote, link[len(self.index_url):].split('/')
))
if len(parts)==2 and '#' not in parts[1]:
# it's a package page, sanitize and index it
pkg = safe_name(parts[0])
ver = safe_version(parts[1])
self.package_pages.setdefault(pkg.lower(),{})[link] = True
return to_filename(pkg), to_filename(ver)
return None, None
# process an index page into the package-page index
for match in HREF.finditer(page):
try:
scan(urljoin(url, htmldecode(match.group(1))))
except ValueError:
pass
pkg, ver = scan(url) # ensure this page is in the page index
if pkg:
# process individual package page
for new_url in find_external_links(url, page):
# Process the found URL
base, frag = egg_info_for_url(new_url)
if base.endswith('.py') and not frag:
if ver:
new_url+='#egg=%s-%s' % (pkg,ver)
else:
self.need_version_info(url)
self.scan_url(new_url)
return PYPI_MD5.sub(
lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
)
else:
return "" # no sense double-scanning non-package pages
def need_version_info(self, url):
self.scan_all(
"Page at %s links to .py file(s) without version info; an index "
"scan is required.", url
)
def scan_all(self, msg=None, *args):
if self.index_url not in self.fetched_urls:
if msg: self.warn(msg,*args)
self.info(
"Scanning index of all packages (this may take a while)"
)
self.scan_url(self.index_url)
def find_packages(self, requirement):
self.scan_url(self.index_url + requirement.unsafe_name+'/')
if not self.package_pages.get(requirement.key):
# Fall back to safe version of the name
self.scan_url(self.index_url + requirement.project_name+'/')
if not self.package_pages.get(requirement.key):
# We couldn't find the target package, so search the index page too
self.not_found_in_index(requirement)
for url in list(self.package_pages.get(requirement.key,())):
# scan each page that might be related to the desired package
self.scan_url(url)
def obtain(self, requirement, installer=None):
self.prescan()
self.find_packages(requirement)
for dist in self[requirement.key]:
if dist in requirement:
return dist
self.debug("%s does not match %s", requirement, dist)
return super(PackageIndex, self).obtain(requirement,installer)
def check_hash(self, checker, filename, tfp):
"""
checker is a ContentChecker
"""
checker.report(self.debug,
"Validating %%s checksum for %s" % filename)
if not checker.is_valid():
tfp.close()
os.unlink(filename)
raise DistutilsError(
"%s validation failed for %s; "
"possible download problem?" % (
checker.hash.name, os.path.basename(filename))
)
def add_find_links(self, urls):
"""Add `urls` to the list that will be prescanned for searches"""
for url in urls:
if (
self.to_scan is None # if we have already "gone online"
or not URL_SCHEME(url) # or it's a local file/directory
or url.startswith('file:')
or list(distros_for_url(url)) # or a direct package link
):
# then go ahead and process it now
self.scan_url(url)
else:
# otherwise, defer retrieval till later
self.to_scan.append(url)
def prescan(self):
"""Scan urls scheduled for prescanning (e.g. --find-links)"""
if self.to_scan:
list(map(self.scan_url, self.to_scan))
self.to_scan = None # from now on, go ahead and process immediately
def not_found_in_index(self, requirement):
if self[requirement.key]: # we've seen at least one distro
meth, msg = self.info, "Couldn't retrieve index page for %r"
else: # no distros seen for this name, might be misspelled
meth, msg = (self.warn,
"Couldn't find index page for %r (maybe misspelled?)")
meth(msg, requirement.unsafe_name)
self.scan_all()
def download(self, spec, tmpdir):
"""Locate and/or download `spec` to `tmpdir`, returning a local path
`spec` may be a ``Requirement`` object, or a string containing a URL,
an existing local filename, or a project/version requirement spec
(i.e. the string form of a ``Requirement`` object). If it is the URL
of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
automatically created alongside the downloaded file.
If `spec` is a ``Requirement`` object or a string containing a
project/version requirement spec, this method returns the location of
a matching distribution (possibly after downloading it to `tmpdir`).
If `spec` is a locally existing file or directory name, it is simply
returned unchanged. If `spec` is a URL, it is downloaded to a subpath
of `tmpdir`, and the local filename is returned. Various errors may be
raised if a problem occurs during downloading.
"""
if not isinstance(spec,Requirement):
scheme = URL_SCHEME(spec)
if scheme:
# It's a url, download it to tmpdir
found = self._download_url(scheme.group(1), spec, tmpdir)
base, fragment = egg_info_for_url(spec)
if base.endswith('.py'):
found = self.gen_setup(found,fragment,tmpdir)
return found
elif os.path.exists(spec):
# Existing file or directory, just return it
return spec
else:
try:
spec = Requirement.parse(spec)
except ValueError:
raise DistutilsError(
"Not a URL, existing file, or requirement spec: %r" %
(spec,)
)
return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
def fetch_distribution(
self, requirement, tmpdir, force_scan=False, source=False,
develop_ok=False, local_index=None
):
"""Obtain a distribution suitable for fulfilling `requirement`
`requirement` must be a ``pkg_resources.Requirement`` instance.
If necessary, or if the `force_scan` flag is set, the requirement is
searched for in the (online) package index as well as the locally
installed packages. If a distribution matching `requirement` is found,
the returned distribution's ``location`` is the value you would have
gotten from calling the ``download()`` method with the matching
distribution's URL or filename. If no matching distribution is found,
``None`` is returned.
If the `source` flag is set, only source distributions and source
checkout links will be considered. Unless the `develop_ok` flag is
set, development and system eggs (i.e., those using the ``.egg-info``
format) will be ignored.
"""
# process a Requirement
self.info("Searching for %s", requirement)
skipped = {}
dist = None
def find(req, env=None):
if env is None:
env = self
# Find a matching distribution; may be called more than once
for dist in env[req.key]:
if dist.precedence==DEVELOP_DIST and not develop_ok:
if dist not in skipped:
self.warn("Skipping development or system egg: %s",dist)
skipped[dist] = 1
continue
if dist in req and (dist.precedence<=SOURCE_DIST or not source):
return dist
if force_scan:
self.prescan()
self.find_packages(requirement)
dist = find(requirement)
if local_index is not None:
dist = dist or find(requirement, local_index)
if dist is None:
if self.to_scan is not None:
self.prescan()
dist = find(requirement)
if dist is None and not force_scan:
self.find_packages(requirement)
dist = find(requirement)
if dist is None:
self.warn(
"No local packages or download links found for %s%s",
(source and "a source distribution of " or ""),
requirement,
)
else:
self.info("Best match: %s", dist)
return dist.clone(location=self.download(dist.location, tmpdir))
def fetch(self, requirement, tmpdir, force_scan=False, source=False):
"""Obtain a file suitable for fulfilling `requirement`
DEPRECATED; use the ``fetch_distribution()`` method now instead. For
backward compatibility, this routine is identical but returns the
``location`` of the downloaded distribution instead of a distribution
object.
"""
dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
if dist is not None:
return dist.location
return None
def gen_setup(self, filename, fragment, tmpdir):
match = EGG_FRAGMENT.match(fragment)
dists = match and [
d for d in
interpret_distro_name(filename, match.group(1), None) if d.version
] or []
if len(dists)==1: # unambiguous ``#egg`` fragment
basename = os.path.basename(filename)
# Make sure the file has been downloaded to the temp dir.
if os.path.dirname(filename) != tmpdir:
dst = os.path.join(tmpdir, basename)
from setuptools.command.easy_install import samefile
if not samefile(filename, dst):
shutil.copy2(filename, dst)
filename=dst
file = open(os.path.join(tmpdir, 'setup.py'), 'w')
file.write(
"from setuptools import setup\n"
"setup(name=%r, version=%r, py_modules=[%r])\n"
% (
dists[0].project_name, dists[0].version,
os.path.splitext(basename)[0]
)
)
file.close()
return filename
elif match:
raise DistutilsError(
"Can't unambiguously interpret project/version identifier %r; "
"any dashes in the name or version should be escaped using "
"underscores. %r" % (fragment,dists)
)
else:
raise DistutilsError(
"Can't process plain .py files without an '#egg=name-version'"
" suffix to enable automatic setup script generation."
)
dl_blocksize = 8192
def _download_to(self, url, filename):
self.info("Downloading %s", url)
# Download the file
fp, tfp, info = None, None, None
try:
checker = HashChecker.from_url(url)
fp = self.open_url(strip_fragment(url))
if isinstance(fp, HTTPError):
raise DistutilsError(
"Can't download %s: %s %s" % (url, fp.code,fp.msg)
)
headers = fp.info()
blocknum = 0
bs = self.dl_blocksize
size = -1
if "content-length" in headers:
# Some servers return multiple Content-Length headers :(
sizes = get_all_headers(headers, 'Content-Length')
size = max(map(int, sizes))
self.reporthook(url, filename, blocknum, bs, size)
tfp = open(filename,'wb')
while True:
block = fp.read(bs)
if block:
checker.feed(block)
tfp.write(block)
blocknum += 1
self.reporthook(url, filename, blocknum, bs, size)
else:
break
self.check_hash(checker, filename, tfp)
return headers
finally:
if fp: fp.close()
if tfp: tfp.close()
def reporthook(self, url, filename, blocknum, blksize, size):
pass # no-op
def open_url(self, url, warning=None):
if url.startswith('file:'):
return local_open(url)
try:
return open_with_auth(url, self.opener)
except (ValueError, httplib.InvalidURL):
v = sys.exc_info()[1]
msg = ' '.join([str(arg) for arg in v.args])
if warning:
self.warn(warning, msg)
else:
raise DistutilsError('%s %s' % (url, msg))
except urllib2.HTTPError:
v = sys.exc_info()[1]
return v
except urllib2.URLError:
v = sys.exc_info()[1]
if warning:
self.warn(warning, v.reason)
else:
raise DistutilsError("Download error for %s: %s"
% (url, v.reason))
except httplib.BadStatusLine:
v = sys.exc_info()[1]
if warning:
self.warn(warning, v.line)
else:
raise DistutilsError(
'%s returned a bad status line. The server might be '
'down, %s' %
(url, v.line)
)
except httplib.HTTPException:
v = sys.exc_info()[1]
if warning:
self.warn(warning, v)
else:
raise DistutilsError("Download error for %s: %s"
% (url, v))
def _download_url(self, scheme, url, tmpdir):
# Determine download filename
#
name, fragment = egg_info_for_url(url)
if name:
while '..' in name:
name = name.replace('..','.').replace('\\','_')
else:
name = "__downloaded__" # default if URL has no path contents
if name.endswith('.egg.zip'):
name = name[:-4] # strip the extra .zip before download
filename = os.path.join(tmpdir,name)
# Download the file
#
if scheme=='svn' or scheme.startswith('svn+'):
return self._download_svn(url, filename)
elif scheme=='git' or scheme.startswith('git+'):
return self._download_git(url, filename)
elif scheme.startswith('hg+'):
return self._download_hg(url, filename)
elif scheme=='file':
return url2pathname(urlparse(url)[2])
else:
self.url_ok(url, True) # raises error if not allowed
return self._attempt_download(url, filename)
def scan_url(self, url):
self.process_url(url, True)
def _attempt_download(self, url, filename):
headers = self._download_to(url, filename)
if 'html' in headers.get('content-type','').lower():
return self._download_html(url, headers, filename)
else:
return filename
def _download_html(self, url, headers, filename):
file = open(filename)
for line in file:
if line.strip():
# Check for a subversion index page
if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
# it's a subversion index page:
file.close()
os.unlink(filename)
return self._download_svn(url, filename)
break # not an index page
file.close()
os.unlink(filename)
raise DistutilsError("Unexpected HTML page found at "+url)
def _download_svn(self, url, filename):
url = url.split('#',1)[0] # remove any fragment for svn's sake
creds = ''
if url.lower().startswith('svn:') and '@' in url:
scheme, netloc, path, p, q, f = urlparse(url)
if not netloc and path.startswith('//') and '/' in path[2:]:
netloc, path = path[2:].split('/',1)
auth, host = splituser(netloc)
if auth:
if ':' in auth:
user, pw = auth.split(':',1)
creds = " --username=%s --password=%s" % (user, pw)
else:
creds = " --username="+auth
netloc = host
                    url = urlunparse((scheme, netloc, path, p, q, f))  # rebuild from the credential-stripped parts
self.info("Doing subversion checkout from %s to %s", url, filename)
os.system("svn checkout%s -q %s %s" % (creds, url, filename))
return filename
@staticmethod
def _vcs_split_rev_from_url(url, pop_prefix=False):
scheme, netloc, path, query, frag = urlsplit(url)
scheme = scheme.split('+', 1)[-1]
# Some fragment identification fails
path = path.split('#',1)[0]
rev = None
if '@' in path:
path, rev = path.rsplit('@', 1)
# Also, discard fragment
url = urlunsplit((scheme, netloc, path, query, ''))
return url, rev
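    # e.g. (illustrative): "git+https://host/repo.git@v1.2#egg=pkg" splits
    # into ("https://host/repo.git", "v1.2") -- the VCS prefix, the "@rev"
    # suffix and the fragment are all peeled away before cloning.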
def _download_git(self, url, filename):
filename = filename.split('#',1)[0]
url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
self.info("Doing git clone from %s to %s", url, filename)
os.system("git clone --quiet %s %s" % (url, filename))
if rev is not None:
self.info("Checking out %s", rev)
os.system("(cd %s && git checkout --quiet %s)" % (
filename,
rev,
))
return filename
def _download_hg(self, url, filename):
filename = filename.split('#',1)[0]
url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
self.info("Doing hg clone from %s to %s", url, filename)
os.system("hg clone --quiet %s %s" % (url, filename))
if rev is not None:
self.info("Updating to %s", rev)
os.system("(cd %s && hg up -C -r %s >&-)" % (
filename,
rev,
))
return filename
def debug(self, msg, *args):
log.debug(msg, *args)
def info(self, msg, *args):
log.info(msg, *args)
def warn(self, msg, *args):
log.warn(msg, *args)
# This pattern matches a character entity reference (a decimal numeric
# reference, a hexadecimal numeric reference, or a named reference).
entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
def uchr(c):
if not isinstance(c, int):
return c
if c>255: return unichr(c)
return chr(c)
def decode_entity(match):
what = match.group(1)
if what.startswith('#x'):
what = int(what[2:], 16)
elif what.startswith('#'):
what = int(what[1:])
else:
what = name2codepoint.get(what, match.group(0))
return uchr(what)
def htmldecode(text):
"""Decode HTML entities in the given text."""
return entity_sub(decode_entity, text)
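# Quick illustrative check (hypothetical input): named, decimal and hex
# entity references are all resolved:
#   htmldecode('a &amp; b &#65; &#x41;')  ->  'a & b A A'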
def socket_timeout(timeout=15):
def _socket_timeout(func):
def _socket_timeout(*args, **kwargs):
old_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
try:
return func(*args, **kwargs)
finally:
socket.setdefaulttimeout(old_timeout)
return _socket_timeout
return _socket_timeout
def _encode_auth(auth):
"""
A function compatible with Python 2.3-3.3 that will encode
auth from a URL suitable for an HTTP header.
>>> str(_encode_auth('username%3Apassword'))
'dXNlcm5hbWU6cGFzc3dvcmQ='
Long auth strings should not cause a newline to be inserted.
>>> long_auth = 'username:' + 'password'*10
>>> chr(10) in str(_encode_auth(long_auth))
False
"""
auth_s = unquote(auth)
# convert to bytes
auth_bytes = auth_s.encode()
# use the legacy interface for Python 2.3 support
encoded_bytes = base64.encodestring(auth_bytes)
# convert back to a string
encoded = encoded_bytes.decode()
# strip the trailing carriage return
return encoded.replace('\n','')
class Credential(object):
"""
A username/password pair. Use like a namedtuple.
"""
def __init__(self, username, password):
self.username = username
self.password = password
def __iter__(self):
yield self.username
yield self.password
def __str__(self):
return '%(username)s:%(password)s' % vars(self)
class PyPIConfig(ConfigParser.ConfigParser):
def __init__(self):
"""
Load from ~/.pypirc
"""
defaults = dict.fromkeys(['username', 'password', 'repository'], '')
ConfigParser.ConfigParser.__init__(self, defaults)
rc = os.path.join(os.path.expanduser('~'), '.pypirc')
if os.path.exists(rc):
self.read(rc)
@property
def creds_by_repository(self):
sections_with_repositories = [
section for section in self.sections()
if self.get(section, 'repository').strip()
]
return dict(map(self._get_repo_cred, sections_with_repositories))
def _get_repo_cred(self, section):
repo = self.get(section, 'repository').strip()
return repo, Credential(
self.get(section, 'username').strip(),
self.get(section, 'password').strip(),
)
def find_credential(self, url):
"""
If the URL indicated appears to be a repository defined in this
config, return the credential for that repository.
"""
for repository, cred in self.creds_by_repository.items():
if url.startswith(repository):
return cred
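# Illustrative ~/.pypirc (hypothetical values):
#
#   [internal]
#   repository = https://pypi.example.org/simple
#   username = alice
#   password = s3cret
#
# PyPIConfig().find_credential('https://pypi.example.org/simple/pkg') would
# then return a Credential printing as 'alice:s3cret'.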
def open_with_auth(url, opener=urllib2.urlopen):
"""Open a urllib2 request, handling HTTP authentication"""
scheme, netloc, path, params, query, frag = urlparse(url)
# Double scheme does not raise on Mac OS X as revealed by a
# failing test. We would expect "nonnumeric port". Refs #20.
if netloc.endswith(':'):
raise httplib.InvalidURL("nonnumeric port: ''")
if scheme in ('http', 'https'):
auth, host = splituser(netloc)
else:
auth = None
if not auth:
cred = PyPIConfig().find_credential(url)
if cred:
auth = str(cred)
info = cred.username, url
log.info('Authenticating as %s for %s (from .pypirc)' % info)
if auth:
auth = "Basic " + _encode_auth(auth)
new_url = urlunparse((scheme,host,path,params,query,frag))
request = urllib2.Request(new_url)
request.add_header("Authorization", auth)
else:
request = urllib2.Request(url)
request.add_header('User-Agent', user_agent)
fp = opener(request)
if auth:
# Put authentication info back into request URL if same host,
# so that links found on the page will work
s2, h2, path2, param2, query2, frag2 = urlparse(fp.url)
if s2==scheme and h2==host:
fp.url = urlunparse((s2,netloc,path2,param2,query2,frag2))
return fp
# adding a timeout to avoid freezing package_index
open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
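# Illustrative call (hypothetical host):
#   open_with_auth('https://user:[email protected]/simple/')
# sends "Authorization: Basic dXNlcjpwdw==" and rebuilds the request URL
# without the embedded credentials.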
def fix_sf_url(url):
return url # backward compatibility
def local_open(url):
"""Read a local path, with special support for directories"""
scheme, server, path, param, query, frag = urlparse(url)
filename = url2pathname(path)
if os.path.isfile(filename):
return urllib2.urlopen(url)
elif path.endswith('/') and os.path.isdir(filename):
files = []
for f in os.listdir(filename):
if f=='index.html':
fp = open(os.path.join(filename,f),'r')
body = fp.read()
fp.close()
break
elif os.path.isdir(os.path.join(filename,f)):
f+='/'
files.append("<a href=%r>%s</a>" % (f,f))
else:
body = ("<html><head><title>%s</title>" % url) + \
"</head><body>%s</body></html>" % '\n'.join(files)
status, message = 200, "OK"
else:
status, message, body = 404, "Path not found", "Not found"
headers = {'content-type': 'text/html'}
return HTTPError(url, status, message, headers, StringIO(body))
| mit | -7,546,038,247,892,309,000 | 35.806238 | 100 | 0.559796 | false |
ndingwall/scikit-learn | sklearn/tests/test_build.py | 17 | 1175 | import os
import pytest
import textwrap
from sklearn import __version__
from sklearn.utils._openmp_helpers import _openmp_parallelism_enabled
def test_openmp_parallelism_enabled():
# Check that sklearn is built with OpenMP-based parallelism enabled.
# This test can be skipped by setting the environment variable
# ``SKLEARN_SKIP_OPENMP_TEST``.
if os.getenv("SKLEARN_SKIP_OPENMP_TEST"):
pytest.skip("test explicitly skipped (SKLEARN_SKIP_OPENMP_TEST)")
base_url = "dev" if __version__.endswith(".dev0") else "stable"
err_msg = textwrap.dedent(
"""
This test fails because scikit-learn has been built without OpenMP.
This is not recommended since some estimators will run in sequential
mode instead of leveraging thread-based parallelism.
You can find instructions to build scikit-learn with OpenMP at this
address:
https://scikit-learn.org/{}/developers/advanced_installation.html
You can skip this test by setting the environment variable
SKLEARN_SKIP_OPENMP_TEST to any value.
""").format(base_url)
assert _openmp_parallelism_enabled(), err_msg
| bsd-3-clause | 1,773,312,326,618,712,800 | 35.71875 | 77 | 0.701277 | false |
anilpai/leetcode | BalancedBT/LCA_BT.py | 1 | 3404 | from TreeSerialize.TreeSerialize import deserialize, drawtree
'''
Needs Python 3+
'''
class Solution_old(object):
'''
Lowest Common Ancestor (LCA) in a Binary Tree (BT) : Takes additional space, not space optimized.
'''
def findPath(self, root, path, k):
'''
A Helper function to make sure that both nodes exist.
'''
if root is None:
return False
path.append(root.val)
if root.val == k:
return True
# To check if K is found in left or right sub tree.
if ((root.left is not None) and (self.findPath(root.left, path, k))) or ((root.right is not None) and (self.findPath(root.right, path, k))):
return True
# If not present in subtree with root, remove root from path and return False
path.pop()
return False
def lca(self, root, n1, n2):
# To store the paths to n1 and n2 from the root
path1 = []
path2 = []
# Find path from root to n1 and n2 and if either is not present, return -1
if (not self.findPath(root, path1, n1) or not self.findPath(root, path2, n2)):
return -1
# Compare the paths to get the first different value.
i = 0
while i < len(path1) and i < len(path2):
if path1[i] != path2[i]:
break
i += 1
return path1[i-1]
class Solution(object):
'''
Lowest Common Ancestor (LCA) in a Binary Tree (BT)
'''
def lca(self, root, p, q):
if root is None:
return None
left = self.lca(root.left, p, q)
right = self.lca(root.right, p, q)
if (left and right) or (root in [p, q]):
return root
else:
return left or right
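# Sketch of the recursion on a small tree (illustrative, not from the tests
# below):
#
#         3
#        / \
#       5   1
#      / \
#     6   2
#
# lca(3, 6, 2): the subtree rooted at 5 returns hits from both sides, so 5
# bubbles up; the right subtree returns None, and 5 is the answer.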
if __name__ == '__main__':
""" Both p & q must exist. If either of them is null, then the other node is the least common ancestor. """
old_solution = False
if old_solution:
s = Solution_old()
lca = s.lca(deserialize('[1,2,3,null,null,4,null,5,6]'), 5, 6)
print(lca)
drawtree(deserialize('[1,2,3,null,null,4,null,5,6]'))
print(s.lca(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,38,48,null,null,null,null,17]'), 17, 12))
drawtree(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,38,48,null,null,null,null,17]'))
print(s.lca(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,18,28,null,null,null,null,17,22,31,43,0,47,49,51,52,null,61,40,null,null,48,58,null,null,null,null,47]'), 21, 58))
drawtree(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,18,28,null,null,null,null,17,22,31,43,0,47,49,51,52,null,61,40,null,null,48,58,null,null,null,null,47]'))
else:
s1 = Solution()
# Example 1
# root = deserialize('[1,2,3,null,null,4,null,5,6]')
# p = root.right.left.left
# q = root.right.left.right
# lca = s1.lca(root, p, q)
# print(lca)
# drawtree(deserialize('[1,2,3,null,null,4,null,5,6]'))
# Example 2
root = deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,38,48,null,null,null,null,17]')
p = root.left.left.left
q = root.left.right.right.left
lca = s1.lca(root, p, q)
print(lca)
drawtree(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,38,48,null,null,null,null,17]'))
| mit | 7,009,244,606,261,417,000 | 29.945455 | 187 | 0.568743 | false |
mpvismer/pyqtgraph | pyqtgraph/widgets/FeedbackButton.py | 52 | 6429 | # -*- coding: utf-8 -*-
from ..Qt import QtCore, QtGui
__all__ = ['FeedbackButton']
class FeedbackButton(QtGui.QPushButton):
"""
QPushButton which flashes success/failure indication for slow or asynchronous procedures.
"""
### For thread-safetyness
sigCallSuccess = QtCore.Signal(object, object, object)
sigCallFailure = QtCore.Signal(object, object, object)
sigCallProcess = QtCore.Signal(object, object, object)
sigReset = QtCore.Signal()
def __init__(self, *args):
QtGui.QPushButton.__init__(self, *args)
self.origStyle = None
self.origText = self.text()
self.origStyle = self.styleSheet()
self.origTip = self.toolTip()
self.limitedTime = True
#self.textTimer = QtCore.QTimer()
#self.tipTimer = QtCore.QTimer()
#self.textTimer.timeout.connect(self.setText)
#self.tipTimer.timeout.connect(self.setToolTip)
self.sigCallSuccess.connect(self.success)
self.sigCallFailure.connect(self.failure)
self.sigCallProcess.connect(self.processing)
self.sigReset.connect(self.reset)
def feedback(self, success, message=None, tip="", limitedTime=True):
"""Calls success() or failure(). If you want the message to be displayed until the user takes an action, set limitedTime to False. Then call self.reset() after the desired action.Threadsafe."""
if success:
self.success(message, tip, limitedTime=limitedTime)
else:
self.failure(message, tip, limitedTime=limitedTime)
def success(self, message=None, tip="", limitedTime=True):
"""Displays specified message on button and flashes button green to let user know action was successful. If you want the success to be displayed until the user takes an action, set limitedTime to False. Then call self.reset() after the desired action. Threadsafe."""
isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
if isGuiThread:
self.setEnabled(True)
#print "success"
self.startBlink("#0F0", message, tip, limitedTime=limitedTime)
else:
self.sigCallSuccess.emit(message, tip, limitedTime)
def failure(self, message=None, tip="", limitedTime=True):
"""Displays specified message on button and flashes button red to let user know there was an error. If you want the error to be displayed until the user takes an action, set limitedTime to False. Then call self.reset() after the desired action. Threadsafe. """
isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
if isGuiThread:
self.setEnabled(True)
#print "fail"
self.startBlink("#F00", message, tip, limitedTime=limitedTime)
else:
self.sigCallFailure.emit(message, tip, limitedTime)
def processing(self, message="Processing..", tip="", processEvents=True):
"""Displays specified message on button to let user know the action is in progress. Threadsafe. """
isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
if isGuiThread:
self.setEnabled(False)
self.setText(message, temporary=True)
self.setToolTip(tip, temporary=True)
if processEvents:
QtGui.QApplication.processEvents()
else:
self.sigCallProcess.emit(message, tip, processEvents)
def reset(self):
"""Resets the button to its original text and style. Threadsafe."""
isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
if isGuiThread:
self.limitedTime = True
self.setText()
self.setToolTip()
self.setStyleSheet()
else:
self.sigReset.emit()
def startBlink(self, color, message=None, tip="", limitedTime=True):
#if self.origStyle is None:
#self.origStyle = self.styleSheet()
#self.origText = self.text()
self.setFixedHeight(self.height())
if message is not None:
self.setText(message, temporary=True)
self.setToolTip(tip, temporary=True)
self.count = 0
#self.indStyle = "QPushButton {border: 2px solid %s; border-radius: 5px}" % color
self.indStyle = "QPushButton {background-color: %s}" % color
self.limitedTime = limitedTime
self.borderOn()
if limitedTime:
QtCore.QTimer.singleShot(2000, self.setText)
QtCore.QTimer.singleShot(10000, self.setToolTip)
def borderOn(self):
self.setStyleSheet(self.indStyle, temporary=True)
if self.limitedTime or self.count <=2:
QtCore.QTimer.singleShot(100, self.borderOff)
def borderOff(self):
self.setStyleSheet()
self.count += 1
if self.count >= 2:
if self.limitedTime:
return
QtCore.QTimer.singleShot(30, self.borderOn)
def setText(self, text=None, temporary=False):
if text is None:
text = self.origText
#print text
QtGui.QPushButton.setText(self, text)
if not temporary:
self.origText = text
def setToolTip(self, text=None, temporary=False):
if text is None:
text = self.origTip
QtGui.QPushButton.setToolTip(self, text)
if not temporary:
self.origTip = text
def setStyleSheet(self, style=None, temporary=False):
if style is None:
style = self.origStyle
QtGui.QPushButton.setStyleSheet(self, style)
if not temporary:
self.origStyle = style
if __name__ == '__main__':
import time
app = QtGui.QApplication([])
win = QtGui.QMainWindow()
btn = FeedbackButton("Button")
fail = True
def click():
btn.processing("Hold on..")
time.sleep(2.0)
global fail
fail = not fail
if fail:
btn.failure(message="FAIL.", tip="There was a failure. Get over it.")
else:
btn.success(message="Bueno!")
btn.clicked.connect(click)
win.setCentralWidget(btn)
    win.show()
    app.exec_()  # enter the Qt event loop so the demo window stays open
| mit | 1,070,364,595,558,060,300 | 38.447853 | 274 | 0.618759 | false |
claneys/shinken | test/shinken_modules.py | 13 | 9693 | #!/usr/bin/env python
import os
import re
import copy
import time
import subprocess
import shutil
import datetime # not used but "sub-"imported by livestatus test.. (to be corrected..)
import sys # not here used but "sub-"imported by livestatus test.. (to be corrected..)
#
from shinken.modulesctx import modulesctx
from shinken.objects.module import Module
from shinken.modulesmanager import ModulesManager
from shinken.misc.datamanager import datamgr
from shinken.log import logger
#
from shinken_test import (
modules_dir,
ShinkenTest,
    time_hacker, # not used here but "sub"-imported by livestatus test (to be corrected)
)
modulesctx.set_modulesdir(modules_dir)
# Special Livestatus module opening since the module rename
#from shinken.modules.livestatus import module as livestatus_broker
livestatus_broker = modulesctx.get_module('livestatus')
LiveStatus_broker = livestatus_broker.LiveStatus_broker
LiveStatus = livestatus_broker.LiveStatus
LiveStatusRegenerator = livestatus_broker.LiveStatusRegenerator
LiveStatusQueryCache = livestatus_broker.LiveStatusQueryCache
LiveStatusClientThread = livestatus_broker.LiveStatusClientThread
Logline = livestatus_broker.Logline
LiveStatusLogStoreMongoDB = modulesctx.get_module('logstore-mongodb').LiveStatusLogStoreMongoDB
LiveStatusLogStoreSqlite = modulesctx.get_module('logstore-sqlite').LiveStatusLogStoreSqlite
livestatus_modconf = Module()
livestatus_modconf.module_name = "livestatus"
livestatus_modconf.module_type = livestatus_broker.properties['type']
livestatus_modconf.properties = livestatus_broker.properties.copy()
class ShinkenModulesTest(ShinkenTest):
def do_load_modules(self):
self.modules_manager.load_and_init()
self.log.log("I correctly loaded the modules: [%s]" % (','.join([inst.get_name() for inst in self.modules_manager.instances])))
def update_broker(self, dodeepcopy=False):
        # The broks should be managed in the correct order
ids = self.sched.brokers['Default-Broker']['broks'].keys()
ids.sort()
for brok_id in ids:
brok = self.sched.brokers['Default-Broker']['broks'][brok_id]
#print "Managing a brok type", brok.type, "of id", brok_id
#if brok.type == 'update_service_status':
# print "Problem?", brok.data['is_problem']
if dodeepcopy:
brok = copy.deepcopy(brok)
brok.prepare()
self.livestatus_broker.manage_brok(brok)
self.sched.brokers['Default-Broker']['broks'] = {}
def init_livestatus(self, modconf=None, dbmodconf=None, needcache=False):
self.livelogs = 'tmp/livelogs.db' + self.testid
if modconf is None:
modconf = Module({'module_name': 'LiveStatus',
'module_type': 'livestatus',
'port': str(50000 + os.getpid()),
'pnp_path': 'tmp/pnp4nagios_test' + self.testid,
'host': '127.0.0.1',
'socket': 'live',
'name': 'test', #?
})
if dbmodconf is None:
dbmodconf = Module({'module_name': 'LogStore',
'module_type': 'logstore_sqlite',
'use_aggressive_sql': "0",
'database_file': self.livelogs,
'archive_path': os.path.join(os.path.dirname(self.livelogs), 'archives'),
})
modconf.modules = [dbmodconf]
self.livestatus_broker = LiveStatus_broker(modconf)
self.livestatus_broker.create_queues()
#--- livestatus_broker.main
self.livestatus_broker.log = logger
# this seems to damage the logger so that the scheduler can't use it
#self.livestatus_broker.log.load_obj(self.livestatus_broker)
self.livestatus_broker.debug_output = []
self.livestatus_broker.modules_manager = ModulesManager('livestatus', modules_dir, [])
self.livestatus_broker.modules_manager.set_modules(self.livestatus_broker.modules)
        # We can now output some previously silenced debug output
self.livestatus_broker.do_load_modules()
for inst in self.livestatus_broker.modules_manager.instances:
if inst.properties["type"].startswith('logstore'):
f = getattr(inst, 'load', None)
if f and callable(f):
f(self.livestatus_broker) # !!! NOT self here !!!!
break
for s in self.livestatus_broker.debug_output:
print "errors during load", s
del self.livestatus_broker.debug_output
self.livestatus_broker.rg = LiveStatusRegenerator()
self.livestatus_broker.datamgr = datamgr
datamgr.load(self.livestatus_broker.rg)
self.livestatus_broker.query_cache = LiveStatusQueryCache()
if not needcache:
self.livestatus_broker.query_cache.disable()
self.livestatus_broker.rg.register_cache(self.livestatus_broker.query_cache)
#--- livestatus_broker.main
self.livestatus_broker.init()
self.livestatus_broker.db = self.livestatus_broker.modules_manager.instances[0]
self.livestatus_broker.livestatus = LiveStatus(self.livestatus_broker.datamgr, self.livestatus_broker.query_cache, self.livestatus_broker.db, self.livestatus_broker.pnp_path, self.livestatus_broker.from_q)
#--- livestatus_broker.do_main
self.livestatus_broker.db.open()
if hasattr(self.livestatus_broker.db, 'prepare_log_db_table'):
self.livestatus_broker.db.prepare_log_db_table()
#--- livestatus_broker.do_main
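# Illustrative defaults: init_livestatus() with no arguments builds a
# throwaway sqlite-backed LiveStatus broker on 127.0.0.1, port 50000 + pid,
# logging to tmp/livelogs.db<testid>.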
class TestConfig(ShinkenModulesTest):
def tearDown(self):
self.livestatus_broker.db.close()
if os.path.exists(self.livelogs):
os.remove(self.livelogs)
if os.path.exists(self.livelogs + "-journal"):
os.remove(self.livelogs + "-journal")
if os.path.exists(self.livestatus_broker.pnp_path):
shutil.rmtree(self.livestatus_broker.pnp_path)
if os.path.exists('var/shinken.log'):
os.remove('var/shinken.log')
if os.path.exists('var/retention.dat'):
os.remove('var/retention.dat')
if os.path.exists('var/status.dat'):
os.remove('var/status.dat')
self.livestatus_broker = None
def contains_line(self, text, pattern):
regex = re.compile(pattern)
for line in text.splitlines():
if re.search(regex, line):
return True
return False
def lines_equal(self, text1, text2):
# gets two multiline strings and compares the contents
# lifestatus output may not be in alphabetical order, so this
# function is used to compare unordered output with unordered
# expected output
# sometimes mklivestatus returns 0 or 1 on an empty result
text1 = text1.replace("200 1", "200 0")
text2 = text2.replace("200 1", "200 0")
text1 = text1.rstrip()
text2 = text2.rstrip()
#print "text1 //%s//" % text1
#print "text2 //%s//" % text2
sorted1 = "\n".join(sorted(text1.split("\n")))
sorted2 = "\n".join(sorted(text2.split("\n")))
len1 = len(text1.split("\n"))
len2 = len(text2.split("\n"))
#print "%s == %s text cmp %s" % (len1, len2, sorted1 == sorted2)
#print "text1 //%s//" % sorted(text1.split("\n"))
#print "text2 //%s//" % sorted(text2.split("\n"))
if sorted1 == sorted2 and len1 == len2:
return True
else:
# Maybe list members are different
# allhosts;test_host_0;test_ok_0;servicegroup_02,servicegroup_01,ok
# allhosts;test_host_0;test_ok_0;servicegroup_02,ok,servicegroup_01
# break it up to
# [['allhosts'], ['test_host_0'], ['test_ok_0'],
# ['ok', 'servicegroup_01', 'servicegroup_02']]
[line for line in sorted(text1.split("\n"))]
data1 = [[sorted(c.split(',')) for c in columns] for columns in [line.split(';') for line in sorted(text1.split("\n")) if line]]
data2 = [[sorted(c.split(',')) for c in columns] for columns in [line.split(';') for line in sorted(text2.split("\n")) if line]]
#print "text1 //%s//" % data1
#print "text2 //%s//" % data2
# cmp is clever enough to handle nested arrays
return cmp(data1, data2) == 0
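    # For example (hypothetical data), these two outputs compare equal even
    # though both the rows and the comma-separated list members are ordered
    # differently:
    #   "a;x,y\nb;z"  vs.  "b;z\na;y,x"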
def show_broks(self, title):
print
print "--- ", title
for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
if re.compile('^service_').match(brok.type):
pass
#print "BROK:", brok.type
#print "BROK ", brok.data['in_checking']
self.update_broker()
request = 'GET services\nColumns: service_description is_executing\n'
response, keepalive = self.livestatus_broker.livestatus.handle_request(request)
print response
| agpl-3.0 | -2,737,508,186,846,881,300 | 42.466368 | 213 | 0.623955 | false |
nexusz99/boto | tests/integration/s3/test_mfa.py | 136 | 3629 | # Copyright (c) 2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Some unit tests for S3 MfaDelete with versioning
"""
import unittest
import time
from nose.plugins.attrib import attr
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from boto.s3.deletemarker import DeleteMarker
@attr('notdefault', 's3mfa')
class S3MFATest (unittest.TestCase):
def setUp(self):
self.conn = S3Connection()
self.bucket_name = 'mfa-%d' % int(time.time())
self.bucket = self.conn.create_bucket(self.bucket_name)
def tearDown(self):
for k in self.bucket.list_versions():
self.bucket.delete_key(k.name, version_id=k.version_id)
self.bucket.delete()
def test_mfadel(self):
# Enable Versioning with MfaDelete
mfa_sn = raw_input('MFA S/N: ')
mfa_code = raw_input('MFA Code: ')
self.bucket.configure_versioning(True, mfa_delete=True, mfa_token=(mfa_sn, mfa_code))
# Check enabling mfa worked.
i = 0
for i in range(1, 8):
time.sleep(2**i)
d = self.bucket.get_versioning_status()
if d['Versioning'] == 'Enabled' and d['MfaDelete'] == 'Enabled':
break
self.assertEqual('Enabled', d['Versioning'])
self.assertEqual('Enabled', d['MfaDelete'])
# Add a key to the bucket
k = self.bucket.new_key('foobar')
s1 = 'This is v1'
k.set_contents_from_string(s1)
v1 = k.version_id
# Now try to delete v1 without the MFA token
try:
self.bucket.delete_key('foobar', version_id=v1)
self.fail("Must fail if not using MFA token")
except S3ResponseError:
pass
# Now try delete again with the MFA token
mfa_code = raw_input('MFA Code: ')
self.bucket.delete_key('foobar', version_id=v1, mfa_token=(mfa_sn, mfa_code))
# Next suspend versioning and disable MfaDelete on the bucket
mfa_code = raw_input('MFA Code: ')
self.bucket.configure_versioning(False, mfa_delete=False, mfa_token=(mfa_sn, mfa_code))
# Lastly, check disabling mfa worked.
i = 0
for i in range(1, 8):
time.sleep(2**i)
d = self.bucket.get_versioning_status()
if d['Versioning'] == 'Suspended' and d['MfaDelete'] != 'Enabled':
break
self.assertEqual('Suspended', d['Versioning'])
self.assertNotEqual('Enabled', d['MfaDelete'])
| mit | -8,621,776,536,053,410,000 | 37.2 | 95 | 0.655277 | false |
fedorpatlin/ansible | lib/ansible/plugins/lookup/credstash.py | 131 | 1891 | # (c) 2015, Ensighten <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
CREDSTASH_INSTALLED = False
try:
import credstash
CREDSTASH_INSTALLED = True
except ImportError:
CREDSTASH_INSTALLED = False
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
if not CREDSTASH_INSTALLED:
raise AnsibleError('The credstash lookup plugin requires credstash to be installed.')
ret = []
for term in terms:
try:
version = kwargs.pop('version', '')
region = kwargs.pop('region', None)
table = kwargs.pop('table', 'credential-store')
val = credstash.getSecret(term, version, region, table,
context=kwargs)
except credstash.ItemNotFound:
raise AnsibleError('Key {0} not found'.format(term))
except Exception as e:
raise AnsibleError('Encountered exception while fetching {0}: {1}'.format(term, e.message))
ret.append(val)
return ret
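# Illustrative playbook usage (key and region are hypothetical):
#
#   - debug:
#       msg: "{{ lookup('credstash', 'deploy/github_key', region='us-east-1') }}"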
| gpl-3.0 | 3,238,813,008,370,005,000 | 35.365385 | 107 | 0.663141 | false |
arthaud/git-dumper | git_dumper.py | 1 | 21245 | #!/usr/bin/env python3
from contextlib import closing
import argparse
import multiprocessing
import os
import os.path
import re
import socket
import subprocess
import sys
import traceback
import urllib.parse
import urllib3
import bs4
import dulwich.index
import dulwich.objects
import dulwich.pack
import requests
import socks
def printf(fmt, *args, file=sys.stdout):
if args:
fmt = fmt % args
file.write(fmt)
file.flush()
def is_html(response):
""" Return True if the response is a HTML webpage """
return (
"Content-Type" in response.headers
and "text/html" in response.headers["Content-Type"]
)
def get_indexed_files(response):
""" Return all the files in the directory index webpage """
html = bs4.BeautifulSoup(response.text, "html.parser")
files = []
bad = {".", "..", "../"}
for link in html.find_all("a"):
url = urllib.parse.urlparse(link.get("href"))
if (
url.path
and url.path not in bad
and not url.path.startswith("/")
and not url.scheme
and not url.netloc
):
files.append(url.path)
return files
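# For instance (made-up listing): an index page whose body contains
#   <a href="objects/">objects/</a> <a href="HEAD">HEAD</a> <a href="../">..</a>
# yields ["objects/", "HEAD"]; "../", absolute and external links are skipped.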
def verify_response(response):
if response.status_code != 200:
return (
False,
"[-] %s/%s responded with status code {code}\n".format(
code=response.status_code
),
)
elif (
"Content-Length" in response.headers
and response.headers["Content-Length"] == 0
):
return False, "[-] %s/%s responded with a zero-length body\n"
elif (
"Content-Type" in response.headers
and "text/html" in response.headers["Content-Type"]
):
return False, "[-] %s/%s responded with HTML\n"
else:
return True, True
def create_intermediate_dirs(path):
""" Create intermediate directories, if necessary """
dirname, basename = os.path.split(path)
if dirname and not os.path.exists(dirname):
try:
os.makedirs(dirname)
except FileExistsError:
pass # race condition
def get_referenced_sha1(obj_file):
""" Return all the referenced SHA1 in the given object file """
objs = []
if isinstance(obj_file, dulwich.objects.Commit):
objs.append(obj_file.tree.decode())
for parent in obj_file.parents:
objs.append(parent.decode())
elif isinstance(obj_file, dulwich.objects.Tree):
for item in obj_file.iteritems():
objs.append(item.sha.decode())
elif isinstance(obj_file, dulwich.objects.Blob):
pass
elif isinstance(obj_file, dulwich.objects.Tag):
pass
else:
printf(
"error: unexpected object type: %r\n" % obj_file, file=sys.stderr
)
sys.exit(1)
return objs
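# Illustrative: a commit object yields [tree sha] + parent shas, a tree
# yields the sha of every entry, and blobs/tags yield nothing -- which is
# what lets FindObjectsWorker below walk the whole object graph.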
class Worker(multiprocessing.Process):
""" Worker for process_tasks """
def __init__(self, pending_tasks, tasks_done, args):
super().__init__()
self.daemon = True
self.pending_tasks = pending_tasks
self.tasks_done = tasks_done
self.args = args
def run(self):
# initialize process
self.init(*self.args)
# fetch and do tasks
while True:
task = self.pending_tasks.get(block=True)
if task is None: # end signal
return
try:
result = self.do_task(task, *self.args)
except Exception:
printf("Task %s raised exception:\n", task, file=sys.stderr)
traceback.print_exc()
result = []
assert isinstance(
result, list
), "do_task() should return a list of tasks"
self.tasks_done.put(result)
def init(self, *args):
raise NotImplementedError
def do_task(self, task, *args):
raise NotImplementedError
def process_tasks(initial_tasks, worker, jobs, args=(), tasks_done=None):
""" Process tasks in parallel """
if not initial_tasks:
return
tasks_seen = set(tasks_done) if tasks_done else set()
pending_tasks = multiprocessing.Queue()
tasks_done = multiprocessing.Queue()
num_pending_tasks = 0
# add all initial tasks in the queue
for task in initial_tasks:
assert task is not None
if task not in tasks_seen:
pending_tasks.put(task)
num_pending_tasks += 1
tasks_seen.add(task)
# initialize processes
processes = [worker(pending_tasks, tasks_done, args) for _ in range(jobs)]
# launch them all
for p in processes:
p.start()
# collect task results
while num_pending_tasks > 0:
task_result = tasks_done.get(block=True)
num_pending_tasks -= 1
for task in task_result:
assert task is not None
if task not in tasks_seen:
pending_tasks.put(task)
num_pending_tasks += 1
tasks_seen.add(task)
# send termination signal (task=None)
for _ in range(jobs):
pending_tasks.put(None)
# join all
for p in processes:
p.join()
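# Minimal usage sketch (hypothetical worker): a Worker subclass whose
# do_task() returns [] only consumes its queue; returning ["follow-up"]
# would enqueue more work, with tasks_seen ensuring each task runs once.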
class DownloadWorker(Worker):
""" Download a list of files """
def init(self, url, directory, retry, timeout, http_headers):
self.session = requests.Session()
self.session.verify = False
self.session.headers = http_headers
self.session.mount(
url, requests.adapters.HTTPAdapter(max_retries=retry)
)
def do_task(self, filepath, url, directory, retry, timeout, http_headers):
if os.path.isfile(os.path.join(directory, filepath)):
printf("[-] Already downloaded %s/%s\n", url, filepath)
return []
with closing(
self.session.get(
"%s/%s" % (url, filepath),
allow_redirects=False,
stream=True,
timeout=timeout,
)
) as response:
printf(
"[-] Fetching %s/%s [%d]\n",
url,
filepath,
response.status_code,
)
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, filepath, file=sys.stderr)
return []
abspath = os.path.abspath(os.path.join(directory, filepath))
create_intermediate_dirs(abspath)
# write file
with open(abspath, "wb") as f:
for chunk in response.iter_content(4096):
f.write(chunk)
return []
class RecursiveDownloadWorker(DownloadWorker):
""" Download a directory recursively """
def do_task(self, filepath, url, directory, retry, timeout, http_headers):
if os.path.isfile(os.path.join(directory, filepath)):
printf("[-] Already downloaded %s/%s\n", url, filepath)
return []
with closing(
self.session.get(
"%s/%s" % (url, filepath),
allow_redirects=False,
stream=True,
timeout=timeout,
)
) as response:
printf(
"[-] Fetching %s/%s [%d]\n",
url,
filepath,
response.status_code,
)
if (
response.status_code in (301, 302)
and "Location" in response.headers
and response.headers["Location"].endswith(filepath + "/")
):
return [filepath + "/"]
if filepath.endswith("/"): # directory index
assert is_html(response)
return [
filepath + filename
for filename in get_indexed_files(response)
]
else: # file
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, filepath, file=sys.stderr)
return []
abspath = os.path.abspath(os.path.join(directory, filepath))
create_intermediate_dirs(abspath)
# write file
with open(abspath, "wb") as f:
for chunk in response.iter_content(4096):
f.write(chunk)
return []
class FindRefsWorker(DownloadWorker):
""" Find refs/ """
def do_task(self, filepath, url, directory, retry, timeout, http_headers):
response = self.session.get(
"%s/%s" % (url, filepath), allow_redirects=False, timeout=timeout
)
printf(
"[-] Fetching %s/%s [%d]\n", url, filepath, response.status_code
)
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, filepath, file=sys.stderr)
return []
abspath = os.path.abspath(os.path.join(directory, filepath))
create_intermediate_dirs(abspath)
# write file
with open(abspath, "w") as f:
f.write(response.text)
# find refs
tasks = []
for ref in re.findall(
r"(refs(/[a-zA-Z0-9\-\.\_\*]+)+)", response.text
):
ref = ref[0]
if not ref.endswith("*"):
tasks.append(".git/%s" % ref)
tasks.append(".git/logs/%s" % ref)
return tasks
class FindObjectsWorker(DownloadWorker):
""" Find objects """
def do_task(self, obj, url, directory, retry, timeout, http_headers):
filepath = ".git/objects/%s/%s" % (obj[:2], obj[2:])
if os.path.isfile(os.path.join(directory, filepath)):
printf("[-] Already downloaded %s/%s\n", url, filepath)
else:
response = self.session.get(
"%s/%s" % (url, filepath),
allow_redirects=False,
timeout=timeout,
)
printf(
"[-] Fetching %s/%s [%d]\n",
url,
filepath,
response.status_code,
)
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, filepath, file=sys.stderr)
return []
abspath = os.path.abspath(os.path.join(directory, filepath))
create_intermediate_dirs(abspath)
# write file
with open(abspath, "wb") as f:
f.write(response.content)
abspath = os.path.abspath(os.path.join(directory, filepath))
# parse object file to find other objects
obj_file = dulwich.objects.ShaFile.from_path(abspath)
return get_referenced_sha1(obj_file)
def fetch_git(url, directory, jobs, retry, timeout, http_headers):
""" Dump a git repository into the output directory """
assert os.path.isdir(directory), "%s is not a directory" % directory
assert jobs >= 1, "invalid number of jobs"
assert retry >= 1, "invalid number of retries"
assert timeout >= 1, "invalid timeout"
session = requests.Session()
session.verify = False
session.headers = http_headers
session.mount(url, requests.adapters.HTTPAdapter(max_retries=retry))
if os.listdir(directory):
printf("Warning: Destination '%s' is not empty\n", directory)
# find base url
url = url.rstrip("/")
if url.endswith("HEAD"):
url = url[:-4]
url = url.rstrip("/")
if url.endswith(".git"):
url = url[:-4]
url = url.rstrip("/")
# check for /.git/HEAD
printf("[-] Testing %s/.git/HEAD ", url)
response = session.get("%s/.git/HEAD" % url, allow_redirects=False)
printf("[%d]\n", response.status_code)
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, "/.git/HEAD", file=sys.stderr)
return 1
elif not re.match(r'^(ref:.*|[0-9a-f]{40}$)', response.text.strip()):
printf(
"error: %s/.git/HEAD is not a git HEAD file\n",
url,
file=sys.stderr,
)
return 1
# check for directory listing
printf("[-] Testing %s/.git/ ", url)
response = session.get("%s/.git/" % url, allow_redirects=False)
printf("[%d]\n", response.status_code)
if (
response.status_code == 200
and is_html(response)
and "HEAD" in get_indexed_files(response)
):
printf("[-] Fetching .git recursively\n")
process_tasks(
[".git/", ".gitignore"],
RecursiveDownloadWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
)
printf("[-] Running git checkout .\n")
os.chdir(directory)
subprocess.check_call(["git", "checkout", "."])
return 0
# no directory listing
printf("[-] Fetching common files\n")
tasks = [
".gitignore",
".git/COMMIT_EDITMSG",
".git/description",
".git/hooks/applypatch-msg.sample",
".git/hooks/commit-msg.sample",
".git/hooks/post-commit.sample",
".git/hooks/post-receive.sample",
".git/hooks/post-update.sample",
".git/hooks/pre-applypatch.sample",
".git/hooks/pre-commit.sample",
".git/hooks/pre-push.sample",
".git/hooks/pre-rebase.sample",
".git/hooks/pre-receive.sample",
".git/hooks/prepare-commit-msg.sample",
".git/hooks/update.sample",
".git/index",
".git/info/exclude",
".git/objects/info/packs",
]
process_tasks(
tasks,
DownloadWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
)
# find refs
printf("[-] Finding refs/\n")
tasks = [
".git/FETCH_HEAD",
".git/HEAD",
".git/ORIG_HEAD",
".git/config",
".git/info/refs",
".git/logs/HEAD",
".git/logs/refs/heads/master",
".git/logs/refs/remotes/origin/HEAD",
".git/logs/refs/remotes/origin/master",
".git/logs/refs/stash",
".git/packed-refs",
".git/refs/heads/master",
".git/refs/remotes/origin/HEAD",
".git/refs/remotes/origin/master",
".git/refs/stash",
".git/refs/wip/wtree/refs/heads/master", # Magit
".git/refs/wip/index/refs/heads/master", # Magit
]
process_tasks(
tasks,
FindRefsWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
)
# find packs
printf("[-] Finding packs\n")
tasks = []
# use .git/objects/info/packs to find packs
info_packs_path = os.path.join(
directory, ".git", "objects", "info", "packs"
)
if os.path.exists(info_packs_path):
with open(info_packs_path, "r") as f:
info_packs = f.read()
for sha1 in re.findall(r"pack-([a-f0-9]{40})\.pack", info_packs):
tasks.append(".git/objects/pack/pack-%s.idx" % sha1)
tasks.append(".git/objects/pack/pack-%s.pack" % sha1)
process_tasks(
tasks,
DownloadWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
)
# find objects
printf("[-] Finding objects\n")
objs = set()
packed_objs = set()
# .git/packed-refs, .git/info/refs, .git/refs/*, .git/logs/*
files = [
os.path.join(directory, ".git", "packed-refs"),
os.path.join(directory, ".git", "info", "refs"),
os.path.join(directory, ".git", "FETCH_HEAD"),
os.path.join(directory, ".git", "ORIG_HEAD"),
]
for dirpath, _, filenames in os.walk(
os.path.join(directory, ".git", "refs")
):
for filename in filenames:
files.append(os.path.join(dirpath, filename))
for dirpath, _, filenames in os.walk(
os.path.join(directory, ".git", "logs")
):
for filename in filenames:
files.append(os.path.join(dirpath, filename))
for filepath in files:
if not os.path.exists(filepath):
continue
with open(filepath, "r") as f:
content = f.read()
for obj in re.findall(r"(^|\s)([a-f0-9]{40})($|\s)", content):
obj = obj[1]
objs.add(obj)
# use .git/index to find objects
index_path = os.path.join(directory, ".git", "index")
if os.path.exists(index_path):
index = dulwich.index.Index(index_path)
for entry in index.iterblobs():
objs.add(entry[1].decode())
# use packs to find more objects to fetch, and objects that are packed
pack_file_dir = os.path.join(directory, ".git", "objects", "pack")
if os.path.isdir(pack_file_dir):
for filename in os.listdir(pack_file_dir):
if filename.startswith("pack-") and filename.endswith(".pack"):
pack_data_path = os.path.join(pack_file_dir, filename)
pack_idx_path = os.path.join(
pack_file_dir, filename[:-5] + ".idx"
)
pack_data = dulwich.pack.PackData(pack_data_path)
pack_idx = dulwich.pack.load_pack_index(pack_idx_path)
pack = dulwich.pack.Pack.from_objects(pack_data, pack_idx)
for obj_file in pack.iterobjects():
packed_objs.add(obj_file.sha().hexdigest())
objs |= set(get_referenced_sha1(obj_file))
# fetch all objects
printf("[-] Fetching objects\n")
process_tasks(
objs,
FindObjectsWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
tasks_done=packed_objs,
)
# git checkout
printf("[-] Running git checkout .\n")
os.chdir(directory)
# ignore errors
subprocess.call(["git", "checkout", "."], stderr=open(os.devnull, "wb"))
return 0
def main():
parser = argparse.ArgumentParser(
usage="git-dumper [options] URL DIR",
description="Dump a git repository from a website.",
)
parser.add_argument("url", metavar="URL", help="url")
parser.add_argument("directory", metavar="DIR", help="output directory")
parser.add_argument("--proxy", help="use the specified proxy")
parser.add_argument(
"-j",
"--jobs",
type=int,
default=10,
help="number of simultaneous requests",
)
parser.add_argument(
"-r",
"--retry",
type=int,
default=3,
help="number of request attempts before giving up",
)
parser.add_argument(
"-t",
"--timeout",
type=int,
default=3,
help="maximum time in seconds before giving up",
)
parser.add_argument(
"-u",
"--user-agent",
type=str,
default="Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0",
help="user-agent to use for requests",
)
parser.add_argument(
"-H",
"--header",
type=str,
action="append",
help="additional http headers, e.g `NAME=VALUE`",
)
args = parser.parse_args()
# jobs
if args.jobs < 1:
parser.error("invalid number of jobs, got `%d`" % args.jobs)
# retry
if args.retry < 1:
parser.error("invalid number of retries, got `%d`" % args.retry)
# timeout
if args.timeout < 1:
parser.error("invalid timeout, got `%d`" % args.timeout)
# header
http_headers = {"User-Agent": args.user_agent}
if args.header:
for header in args.header:
tokens = header.split("=", maxsplit=1)
if len(tokens) != 2:
parser.error(
"http header must have the form NAME=VALUE, got `%s`"
% header
)
name, value = tokens
http_headers[name.strip()] = value.strip()
# proxy
if args.proxy:
proxy_valid = False
for pattern, proxy_type in [
(r"^socks5:(.*):(\d+)$", socks.PROXY_TYPE_SOCKS5),
(r"^socks4:(.*):(\d+)$", socks.PROXY_TYPE_SOCKS4),
(r"^http://(.*):(\d+)$", socks.PROXY_TYPE_HTTP),
(r"^(.*):(\d+)$", socks.PROXY_TYPE_SOCKS5),
]:
m = re.match(pattern, args.proxy)
if m:
socks.setdefaultproxy(proxy_type, m.group(1), int(m.group(2)))
socket.socket = socks.socksocket
proxy_valid = True
break
if not proxy_valid:
parser.error("invalid proxy, got `%s`" % args.proxy)
# output directory
if not os.path.exists(args.directory):
os.makedirs(args.directory)
if not os.path.isdir(args.directory):
parser.error("`%s` is not a directory" % args.directory)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# fetch everything
sys.exit(
fetch_git(
args.url,
args.directory,
args.jobs,
args.retry,
args.timeout,
http_headers,
)
)
if __name__ == "__main__":
main()
| mit | -3,200,989,026,420,141,600 | 28.343923 | 85 | 0.544975 | false |
gudcjfdldu/volatility | volatility/plugins/netscan.py | 44 | 9924 | # Volatility
#
# Authors:
# Michael Hale Ligh <[email protected]>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
import volatility.utils as utils
import volatility.plugins.common as common
import volatility.scan as scan
import volatility.obj as obj
import volatility.cache as cache
import volatility.debug as debug
import socket
import volatility.plugins.overlays.windows.tcpip_vtypes as tcpip_vtypes
# Python's socket.AF_INET6 is 0x1e but Microsoft defines it
# as a constant value of 0x17 in their source code. Thus we
# need Microsoft's since that's what is found in memory.
AF_INET = 2
AF_INET6 = 0x17
# String representations of INADDR_ANY and INADDR6_ANY
inaddr_any = utils.inet_ntop(socket.AF_INET, '\0' * 4)
inaddr6_any = utils.inet_ntop(socket.AF_INET6, '\0' * 16)
#--------------------------------------------------------------------------------
# pool scanners
#--------------------------------------------------------------------------------
class PoolScanUdpEndpoint(scan.PoolScanner):
"""PoolScanner for Udp Endpoints"""
def object_offset(self, found, address_space):
return found + (address_space.profile.get_obj_size("_POOL_HEADER") -
address_space.profile.get_obj_offset("_POOL_HEADER", "PoolTag"))
checks = [ ('PoolTagCheck', dict(tag = "UdpA")),
# Seen as 0xa8 on Vista SP0, 0xb0 on Vista SP2, and 0xb8 on 7
# Seen as 0x150 on Win7 SP0 x64
('CheckPoolSize', dict(condition = lambda x: x >= 0xa8)),
('CheckPoolType', dict(non_paged = True, free = True)),
('CheckPoolIndex', dict(value = 0)),
]
class PoolScanTcpListener(PoolScanUdpEndpoint):
"""PoolScanner for Tcp Listeners"""
checks = [ ('PoolTagCheck', dict(tag = "TcpL")),
# Seen as 0x120 on Win7 SP0 x64
('CheckPoolSize', dict(condition = lambda x: x >= 0xa8)),
('CheckPoolType', dict(non_paged = True, free = True)),
('CheckPoolIndex', dict(value = 0)),
]
class PoolScanTcpEndpoint(PoolScanUdpEndpoint):
"""PoolScanner for TCP Endpoints"""
checks = [ ('PoolTagCheck', dict(tag = "TcpE")),
# Seen as 0x1f0 on Vista SP0, 0x1f8 on Vista SP2 and 0x210 on 7
# Seen as 0x320 on Win7 SP0 x64
('CheckPoolSize', dict(condition = lambda x: x >= 0x1f0)),
('CheckPoolType', dict(non_paged = True, free = True)),
('CheckPoolIndex', dict(value = 0)),
]
#--------------------------------------------------------------------------------
# object classes
#--------------------------------------------------------------------------------
class _TCP_LISTENER(obj.CType):
"""Class for objects found in TcpL pools"""
@property
def AddressFamily(self):
return self.InetAF.dereference().AddressFamily
@property
def Owner(self):
return self.m('Owner').dereference()
def dual_stack_sockets(self):
"""Handle Windows dual-stack sockets"""
# If this pointer is valid, the socket is bound to
# a specific IP address. Otherwise, the socket is
# listening on all IP addresses of the address family.
local_addr = self.LocalAddr.dereference()
# Note the remote address is always INADDR_ANY or
# INADDR6_ANY for sockets. The moment a client
# connects to the listener, a TCP_ENDPOINT is created
# and that structure contains the remote address.
if local_addr != None:
inaddr = local_addr.pData.dereference().dereference()
if self.AddressFamily == AF_INET:
yield "v4", inaddr.addr4, inaddr_any
else:
yield "v6", inaddr.addr6, inaddr6_any
else:
yield "v4", inaddr_any, inaddr_any
if self.AddressFamily == AF_INET6:
yield "v6", inaddr6_any, inaddr6_any
class _TCP_ENDPOINT(_TCP_LISTENER):
"""Class for objects found in TcpE pools"""
def _ipv4_or_ipv6(self, in_addr):
if self.AddressFamily == AF_INET:
return in_addr.addr4
else:
return in_addr.addr6
@property
def LocalAddress(self):
inaddr = self.AddrInfo.dereference().Local.\
pData.dereference().dereference()
return self._ipv4_or_ipv6(inaddr)
@property
def RemoteAddress(self):
inaddr = self.AddrInfo.dereference().\
Remote.dereference()
return self._ipv4_or_ipv6(inaddr)
class _UDP_ENDPOINT(_TCP_LISTENER):
"""Class for objects found in UdpA pools"""
#--------------------------------------------------------------------------------
# profile modifications
#--------------------------------------------------------------------------------
class NetscanObjectClasses(obj.ProfileModification):
"""Network OCs for Vista, 2008, and 7 x86 and x64"""
before = ['WindowsObjectClasses']
conditions = {'os': lambda x: x == 'windows',
'major': lambda x : x == 6,
'minor': lambda x : x >= 0}
def modification(self, profile):
profile.object_classes.update({
'_TCP_LISTENER': _TCP_LISTENER,
'_TCP_ENDPOINT': _TCP_ENDPOINT,
'_UDP_ENDPOINT': _UDP_ENDPOINT,
})
#--------------------------------------------------------------------------------
# netscan plugin
#--------------------------------------------------------------------------------
class Netscan(common.AbstractWindowsCommand):
"""Scan a Vista, 2008 or Windows 7 image for connections and sockets"""
@staticmethod
def is_valid_profile(profile):
return (profile.metadata.get('os', 'unknown') == 'windows' and
profile.metadata.get('major', 0) == 6)
@cache.CacheDecorator("tests/netscan")
def calculate(self):
# Virtual kernel space for dereferencing pointers
kernel_space = utils.load_as(self._config)
# Physical space for scanning
flat_space = utils.load_as(self._config, astype = 'physical')
if not self.is_valid_profile(kernel_space.profile):
debug.error("This command does not support the selected profile.")
# Scan for TCP listeners also known as sockets
for offset in PoolScanTcpListener().scan(flat_space):
tcpentry = obj.Object('_TCP_LISTENER', offset = offset,
vm = flat_space, native_vm = kernel_space)
# Only accept IPv4 or IPv6
if tcpentry.AddressFamily not in (AF_INET, AF_INET6):
continue
# For TcpL, the state is always listening and the remote port is zero
for ver, laddr, raddr in tcpentry.dual_stack_sockets():
yield tcpentry, "TCP" + ver, laddr, tcpentry.Port, raddr, 0, "LISTENING"
# Scan for TCP endpoints also known as connections
for offset in PoolScanTcpEndpoint().scan(flat_space):
tcpentry = obj.Object('_TCP_ENDPOINT', offset = offset,
vm = flat_space, native_vm = kernel_space)
if tcpentry.AddressFamily == AF_INET:
proto = "TCPv4"
elif tcpentry.AddressFamily == AF_INET6:
proto = "TCPv6"
else:
continue
# These are our sanity checks
if (tcpentry.State.v() not in tcpip_vtypes.TCP_STATE_ENUM or
(not tcpentry.LocalAddress and (not tcpentry.Owner or
tcpentry.Owner.UniqueProcessId == 0 or
tcpentry.Owner.UniqueProcessId > 65535))):
continue
yield tcpentry, proto, tcpentry.LocalAddress, tcpentry.LocalPort, \
tcpentry.RemoteAddress, tcpentry.RemotePort, tcpentry.State
# Scan for UDP endpoints
for offset in PoolScanUdpEndpoint().scan(flat_space):
udpentry = obj.Object('_UDP_ENDPOINT', offset = offset,
vm = flat_space, native_vm = kernel_space)
# Only accept IPv4 or IPv6
if udpentry.AddressFamily not in (AF_INET, AF_INET6):
continue
# For UdpA, the state is always blank and the remote end is asterisks
for ver, laddr, _ in udpentry.dual_stack_sockets():
yield udpentry, "UDP" + ver, laddr, udpentry.Port, "*", "*", ""
def render_text(self, outfd, data):
outfd.write("{0:<10} {1:<8} {2:<30} {3:<20} {4:<16} {5:<8} {6:<14} {7}\n".format(
"Offset(P)", "Proto", "Local Address", "Foreign Address",
"State", "Pid", "Owner", "Created"))
for net_object, proto, laddr, lport, raddr, rport, state in data:
lendpoint = "{0}:{1}".format(laddr, lport)
rendpoint = "{0}:{1}".format(raddr, rport)
outfd.write("{0:<#10x} {1:<8} {2:<30} {3:<20} {4:<16} {5:<8} {6:<14} {7}\n".format(
net_object.obj_offset, proto, lendpoint,
rendpoint, state, net_object.Owner.UniqueProcessId,
net_object.Owner.ImageFileName,
str(net_object.CreateTime or '')
))
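# Typical invocation (illustrative):
#   $ python vol.py -f memory.vmem --profile=Win7SP1x86 netscan
# which pool-scans the physical address space and prints one row per
# TCP/UDP endpoint or listener found above.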
| gpl-2.0 | 1,029,224,827,781,066,900 | 37.614786 | 95 | 0.561165 | false |
Slezhuk/ansible | lib/ansible/module_utils/k8s_common.py | 62 | 12599 | #
# Copyright 2017 Red Hat | Ansible
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import copy
import json
import os
from ansible.module_utils.basic import AnsibleModule
try:
from openshift.helper.ansible import KubernetesAnsibleModuleHelper, ARG_ATTRIBUTES_BLACKLIST
from openshift.helper.exceptions import KubernetesException
HAS_K8S_MODULE_HELPER = True
except ImportError as exc:
HAS_K8S_MODULE_HELPER = False
try:
import yaml
HAS_YAML = True
except ImportError:
HAS_YAML = False
class KubernetesAnsibleException(Exception):
pass
class KubernetesAnsibleModule(AnsibleModule):
@staticmethod
def get_helper(api_version, kind):
return KubernetesAnsibleModuleHelper(api_version, kind)
def __init__(self, kind, api_version):
self.api_version = api_version
self.kind = kind
self.argspec_cache = None
if not HAS_K8S_MODULE_HELPER:
raise KubernetesAnsibleException(
"This module requires the OpenShift Python client. Try `pip install openshift`"
)
if not HAS_YAML:
raise KubernetesAnsibleException(
"This module requires PyYAML. Try `pip install PyYAML`"
)
try:
self.helper = self.get_helper(api_version, kind)
except Exception as exc:
raise KubernetesAnsibleException(
"Error initializing AnsibleModuleHelper: {}".format(exc)
)
mutually_exclusive = (
('resource_definition', 'src'),
)
AnsibleModule.__init__(self,
argument_spec=self.argspec,
supports_check_mode=True,
mutually_exclusive=mutually_exclusive)
@property
def argspec(self):
"""
Build the module argument spec from the helper.argspec, removing any extra attributes not needed by
Ansible.
:return: dict: a valid Ansible argument spec
"""
if not self.argspec_cache:
spec = {}
for arg_name, arg_properties in self.helper.argspec.items():
spec[arg_name] = {}
for option, option_value in arg_properties.items():
if option not in ARG_ATTRIBUTES_BLACKLIST:
if option == 'choices':
if isinstance(option_value, dict):
spec[arg_name]['choices'] = [value for key, value in option_value.items()]
else:
spec[arg_name]['choices'] = option_value
else:
spec[arg_name][option] = option_value
self.argspec_cache = spec
return self.argspec_cache
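    # For illustration (hypothetical helper entry): an item such as
    #   {'name': {'type': 'str', 'required': True, <helper-internal keys>}}
    # comes through with every attribute in ARG_ATTRIBUTES_BLACKLIST dropped,
    # leaving a plain Ansible spec: {'name': {'type': 'str', 'required': True}}.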
def execute_module(self):
"""
Performs basic CRUD operations on the model object. Ends by calling
AnsibleModule.fail_json(), if an error is encountered, otherwise
AnsibleModule.exit_json() with a dict containing:
changed: boolean
api_version: the API version
<kind>: a dict representing the object's state
:return: None
"""
if self.params.get('debug'):
self.helper.enable_debug(reset_logfile=False)
self.helper.log_argspec()
resource_definition = self.params.get('resource_definition')
if self.params.get('src'):
resource_definition = self.load_resource_definition(self.params['src'])
if resource_definition:
resource_params = self.resource_to_parameters(resource_definition)
self.params.update(resource_params)
state = self.params.get('state', None)
force = self.params.get('force', False)
name = self.params.get('name')
namespace = self.params.get('namespace', None)
existing = None
return_attributes = dict(changed=False, api_version=self.api_version)
return_attributes[self.helper.base_model_name_snake] = {}
try:
auth_options = {}
for key, value in self.helper.argspec.items():
if value.get('auth_option') and self.params.get(key) is not None:
auth_options[key] = self.params[key]
self.helper.set_client_config(**auth_options)
except KubernetesException as e:
self.fail_json(msg='Error loading config', error=str(e))
if state is None:
            # This is a list, rollback, or similar module with no 'state' param
if self.helper.base_model_name_snake.endswith('list'):
# For list modules, execute a GET, and exit
k8s_obj = self._read(name, namespace)
return_attributes[self.kind] = k8s_obj.to_dict()
self.exit_json(**return_attributes)
elif self.helper.has_method('create'):
# For a rollback, execute a POST, and exit
k8s_obj = self._create(namespace)
return_attributes[self.kind] = k8s_obj.to_dict()
return_attributes['changed'] = True
self.exit_json(**return_attributes)
else:
self.fail_json(msg="Missing state parameter. Expected one of: present, absent")
# CRUD modules
try:
existing = self.helper.get_object(name, namespace)
except KubernetesException as exc:
self.fail_json(msg='Failed to retrieve requested object: {}'.format(exc.message),
error=exc.value.get('status'))
if state == 'absent':
if not existing:
# The object already does not exist
self.exit_json(**return_attributes)
else:
# Delete the object
if not self.check_mode:
try:
self.helper.delete_object(name, namespace)
except KubernetesException as exc:
self.fail_json(msg="Failed to delete object: {}".format(exc.message),
error=exc.value.get('status'))
return_attributes['changed'] = True
self.exit_json(**return_attributes)
else:
if not existing:
k8s_obj = self._create(namespace)
return_attributes[self.kind] = k8s_obj.to_dict()
return_attributes['changed'] = True
self.exit_json(**return_attributes)
if existing and force:
k8s_obj = None
request_body = self.helper.request_body_from_params(self.params)
if not self.check_mode:
try:
k8s_obj = self.helper.replace_object(name, namespace, body=request_body)
except KubernetesException as exc:
self.fail_json(msg="Failed to replace object: {}".format(exc.message),
error=exc.value.get('status'))
return_attributes[self.kind] = k8s_obj.to_dict()
return_attributes['changed'] = True
self.exit_json(**return_attributes)
# Check if existing object should be patched
k8s_obj = copy.deepcopy(existing)
try:
self.helper.object_from_params(self.params, obj=k8s_obj)
except KubernetesException as exc:
self.fail_json(msg="Failed to patch object: {}".format(exc.message))
match, diff = self.helper.objects_match(existing, k8s_obj)
if match:
return_attributes[self.kind] = existing.to_dict()
self.exit_json(**return_attributes)
else:
self.helper.log('Existing:')
self.helper.log(json.dumps(existing.to_dict(), indent=4))
self.helper.log('\nDifferences:')
self.helper.log(json.dumps(diff, indent=4))
# Differences exist between the existing obj and requested params
if not self.check_mode:
try:
k8s_obj = self.helper.patch_object(name, namespace, k8s_obj)
except KubernetesException as exc:
self.fail_json(msg="Failed to patch object: {}".format(exc.message))
return_attributes[self.kind] = k8s_obj.to_dict()
return_attributes['changed'] = True
self.exit_json(**return_attributes)
def _create(self, namespace):
request_body = None
k8s_obj = None
try:
request_body = self.helper.request_body_from_params(self.params)
except KubernetesException as exc:
self.fail_json(msg="Failed to create object: {}".format(exc.message))
if not self.check_mode:
try:
k8s_obj = self.helper.create_object(namespace, body=request_body)
except KubernetesException as exc:
self.fail_json(msg="Failed to create object: {}".format(exc.message),
error=exc.value.get('status'))
return k8s_obj
def _read(self, name, namespace):
k8s_obj = None
try:
k8s_obj = self.helper.get_object(name, namespace)
except KubernetesException as exc:
self.fail_json(msg='Failed to retrieve requested object',
error=exc.value.get('status'))
return k8s_obj
def load_resource_definition(self, src):
""" Load the requested src path """
result = None
path = os.path.normpath(src)
self.helper.log("Reading definition from {}".format(path))
if not os.path.exists(path):
self.fail_json(msg="Error accessing {}. Does the file exist?".format(path))
try:
result = yaml.safe_load(open(path, 'r'))
except (IOError, yaml.YAMLError) as exc:
self.fail_json(msg="Error loading resource_definition: {}".format(exc))
return result
def resource_to_parameters(self, resource):
""" Converts a resource definition to module parameters """
parameters = {}
for key, value in resource.items():
if key in ('apiVersion', 'kind', 'status'):
continue
elif key == 'metadata' and isinstance(value, dict):
for meta_key, meta_value in value.items():
if meta_key in ('name', 'namespace', 'labels', 'annotations'):
parameters[meta_key] = meta_value
elif key in self.helper.argspec and value is not None:
parameters[key] = value
elif isinstance(value, dict):
self._add_parameter(value, [key], parameters)
self.helper.log("Request to parameters: {}".format(json.dumps(parameters)))
return parameters
def _add_parameter(self, request, path, parameters):
for key, value in request.items():
if path:
param_name = '_'.join(path + [self.helper.attribute_to_snake(key)])
else:
param_name = self.helper.attribute_to_snake(key)
if param_name in self.helper.argspec and value is not None:
parameters[param_name] = value
elif isinstance(value, dict):
continue_path = copy.copy(path) if path else []
continue_path.append(self.helper.attribute_to_snake(key))
self._add_parameter(value, continue_path, parameters)
else:
self.fail_json(
msg=("Error parsing resource definition. Encountered {}, which does not map to a module "
"parameter. If this looks like a problem with the module, please open an issue at "
"github.com/openshift/openshift-restclient-python/issues").format(param_name)
)
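# Illustrative usage sketch (not part of the original module; the kind and
# api_version values below are assumptions for demonstration only):
#
#     from ansible.module_utils.k8s_common import KubernetesAnsibleModule
#
#     module = KubernetesAnsibleModule('deployment', 'extensions/v1beta1')
#     module.execute_module()   # performs CRUD based on 'state' and params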
| gpl-3.0 | 886,119,348,391,915,600 | 41.420875 | 109 | 0.572982 | false |
alanch-ms/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/UserDict.py | 358 | 5811 | """A more or less complete user-defined wrapper around dictionary objects."""
class UserDict:
def __init__(self, dict=None, **kwargs):
self.data = {}
if dict is not None:
self.update(dict)
if len(kwargs):
self.update(kwargs)
def __repr__(self): return repr(self.data)
def __cmp__(self, dict):
if isinstance(dict, UserDict):
return cmp(self.data, dict.data)
else:
return cmp(self.data, dict)
__hash__ = None # Avoid Py3k warning
def __len__(self): return len(self.data)
def __getitem__(self, key):
if key in self.data:
return self.data[key]
if hasattr(self.__class__, "__missing__"):
return self.__class__.__missing__(self, key)
raise KeyError(key)
def __setitem__(self, key, item): self.data[key] = item
def __delitem__(self, key): del self.data[key]
def clear(self): self.data.clear()
def copy(self):
if self.__class__ is UserDict:
return UserDict(self.data.copy())
import copy
data = self.data
try:
self.data = {}
c = copy.copy(self)
finally:
self.data = data
c.update(self)
return c
def keys(self): return self.data.keys()
def items(self): return self.data.items()
def iteritems(self): return self.data.iteritems()
def iterkeys(self): return self.data.iterkeys()
def itervalues(self): return self.data.itervalues()
def values(self): return self.data.values()
def has_key(self, key): return key in self.data
def update(self, dict=None, **kwargs):
if dict is None:
pass
elif isinstance(dict, UserDict):
self.data.update(dict.data)
elif isinstance(dict, type({})) or not hasattr(dict, 'items'):
self.data.update(dict)
else:
for k, v in dict.items():
self[k] = v
if len(kwargs):
self.data.update(kwargs)
def get(self, key, failobj=None):
if key not in self:
return failobj
return self[key]
def setdefault(self, key, failobj=None):
if key not in self:
self[key] = failobj
return self[key]
def pop(self, key, *args):
return self.data.pop(key, *args)
def popitem(self):
return self.data.popitem()
def __contains__(self, key):
return key in self.data
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
class IterableUserDict(UserDict):
def __iter__(self):
return iter(self.data)
import _abcoll
_abcoll.MutableMapping.register(IterableUserDict)
class DictMixin:
# Mixin defining all dictionary methods for classes that already have
# a minimum dictionary interface including getitem, setitem, delitem,
# and keys. Without knowledge of the subclass constructor, the mixin
# does not define __init__() or copy(). In addition to the four base
# methods, progressively more efficiency comes with defining
# __contains__(), __iter__(), and iteritems().
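    #
    # Illustrative sketch (not part of the stdlib source): a minimal
    # mapping only needs to supply the four base methods, for example:
    #
    #     class SeqDict(DictMixin):
    #         def __init__(self):
    #             self._keys, self._values = [], []
    #         def __getitem__(self, key):
    #             try:
    #                 return self._values[self._keys.index(key)]
    #             except ValueError:
    #                 raise KeyError(key)
    #         def __setitem__(self, key, value):
    #             try:
    #                 self._values[self._keys.index(key)] = value
    #             except ValueError:
    #                 self._keys.append(key)
    #                 self._values.append(value)
    #         def __delitem__(self, key):
    #             try:
    #                 i = self._keys.index(key)
    #             except ValueError:
    #                 raise KeyError(key)
    #             del self._keys[i]
    #             del self._values[i]
    #         def keys(self):
    #             return list(self._keys)
    #
    # DictMixin then fills in items(), update(), get(), setdefault(),
    # pop(), popitem() and the rest for free.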
# second level definitions support higher levels
def __iter__(self):
for k in self.keys():
yield k
def has_key(self, key):
try:
self[key]
except KeyError:
return False
return True
def __contains__(self, key):
return self.has_key(key)
# third level takes advantage of second level definitions
def iteritems(self):
for k in self:
yield (k, self[k])
def iterkeys(self):
return self.__iter__()
# fourth level uses definitions from lower levels
def itervalues(self):
for _, v in self.iteritems():
yield v
def values(self):
return [v for _, v in self.iteritems()]
def items(self):
return list(self.iteritems())
def clear(self):
for key in self.keys():
del self[key]
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
def pop(self, key, *args):
if len(args) > 1:
raise TypeError, "pop expected at most 2 arguments, got "\
+ repr(1 + len(args))
try:
value = self[key]
except KeyError:
if args:
return args[0]
raise
del self[key]
return value
def popitem(self):
try:
k, v = self.iteritems().next()
except StopIteration:
raise KeyError, 'container is empty'
del self[k]
return (k, v)
def update(self, other=None, **kwargs):
# Make progressively weaker assumptions about "other"
if other is None:
pass
elif hasattr(other, 'iteritems'): # iteritems saves memory and lookups
for k, v in other.iteritems():
self[k] = v
elif hasattr(other, 'keys'):
for k in other.keys():
self[k] = other[k]
else:
for k, v in other:
self[k] = v
if kwargs:
self.update(kwargs)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __repr__(self):
return repr(dict(self.iteritems()))
def __cmp__(self, other):
if other is None:
return 1
if isinstance(other, DictMixin):
other = dict(other.iteritems())
return cmp(dict(self.iteritems()), other)
def __len__(self):
return len(self.keys())
| apache-2.0 | -679,718,108,906,633,600 | 31.283333 | 79 | 0.553089 | false |
jonparrott/gcloud-python | securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2_grpc.py | 2 | 21413 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.securitycenter_v1beta1.proto import finding_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2
from google.cloud.securitycenter_v1beta1.proto import organization_settings_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2
from google.cloud.securitycenter_v1beta1.proto import security_marks_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_security__marks__pb2
from google.cloud.securitycenter_v1beta1.proto import securitycenter_service_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2
from google.cloud.securitycenter_v1beta1.proto import source_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2
from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2
from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2
from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2
class SecurityCenterStub(object):
"""V1 Beta APIs for Security Center service.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateSource = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/CreateSource',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.CreateSourceRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.FromString,
)
self.CreateFinding = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/CreateFinding',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.CreateFindingRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.FromString,
)
self.GetIamPolicy = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GetIamPolicy',
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
)
self.GetOrganizationSettings = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GetOrganizationSettings',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GetOrganizationSettingsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2.OrganizationSettings.FromString,
)
self.GetSource = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GetSource',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GetSourceRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.FromString,
)
self.GroupAssets = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GroupAssets',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupAssetsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupAssetsResponse.FromString,
)
self.GroupFindings = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GroupFindings',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupFindingsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupFindingsResponse.FromString,
)
self.ListAssets = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/ListAssets',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListAssetsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListAssetsResponse.FromString,
)
self.ListFindings = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/ListFindings',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListFindingsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListFindingsResponse.FromString,
)
self.ListSources = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/ListSources',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListSourcesRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListSourcesResponse.FromString,
)
self.RunAssetDiscovery = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/RunAssetDiscovery',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.RunAssetDiscoveryRequest.SerializeToString,
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
)
self.SetFindingState = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/SetFindingState',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.SetFindingStateRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.FromString,
)
self.SetIamPolicy = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/SetIamPolicy',
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
)
self.TestIamPermissions = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/TestIamPermissions',
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString,
)
self.UpdateFinding = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateFinding',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateFindingRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.FromString,
)
self.UpdateOrganizationSettings = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateOrganizationSettings',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateOrganizationSettingsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2.OrganizationSettings.FromString,
)
self.UpdateSource = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateSource',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateSourceRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.FromString,
)
self.UpdateSecurityMarks = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateSecurityMarks',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateSecurityMarksRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_security__marks__pb2.SecurityMarks.FromString,
)
class SecurityCenterServicer(object):
"""V1 Beta APIs for Security Center service.
"""
def CreateSource(self, request, context):
"""Creates a source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateFinding(self, request, context):
"""Creates a finding. The corresponding source must exist for finding creation
to succeed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetIamPolicy(self, request, context):
"""Gets the access control policy on the specified Source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetOrganizationSettings(self, request, context):
"""Gets the settings for an organization.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSource(self, request, context):
"""Gets a source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GroupAssets(self, request, context):
"""Filters an organization's assets and groups them by their specified
properties.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GroupFindings(self, request, context):
"""Filters an organization or source's findings and groups them by their
specified properties.
To group across all sources provide a `-` as the source id.
Example: /v1beta1/organizations/123/sources/-/findings
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListAssets(self, request, context):
"""Lists an organization's assets.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListFindings(self, request, context):
"""Lists an organization or source's findings.
To list across all sources provide a `-` as the source id.
Example: /v1beta1/organizations/123/sources/-/findings
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListSources(self, request, context):
"""Lists all sources belonging to an organization.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RunAssetDiscovery(self, request, context):
"""Runs asset discovery. The discovery is tracked with a long-running
operation.
This API can only be called with limited frequency for an organization. If
it is called too frequently the caller will receive a TOO_MANY_REQUESTS
error.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetFindingState(self, request, context):
"""Updates the state of a finding.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetIamPolicy(self, request, context):
"""Sets the access control policy on the specified Source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def TestIamPermissions(self, request, context):
"""Returns the permissions that a caller has on the specified source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateFinding(self, request, context):
"""Creates or updates a finding. The corresponding source must exist for a
finding creation to succeed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateOrganizationSettings(self, request, context):
"""Updates an organization's settings.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateSource(self, request, context):
"""Updates a source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateSecurityMarks(self, request, context):
"""Updates security marks.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_SecurityCenterServicer_to_server(servicer, server):
rpc_method_handlers = {
'CreateSource': grpc.unary_unary_rpc_method_handler(
servicer.CreateSource,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.CreateSourceRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.SerializeToString,
),
'CreateFinding': grpc.unary_unary_rpc_method_handler(
servicer.CreateFinding,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.CreateFindingRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.SerializeToString,
),
'GetIamPolicy': grpc.unary_unary_rpc_method_handler(
servicer.GetIamPolicy,
request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString,
response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
),
'GetOrganizationSettings': grpc.unary_unary_rpc_method_handler(
servicer.GetOrganizationSettings,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GetOrganizationSettingsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2.OrganizationSettings.SerializeToString,
),
'GetSource': grpc.unary_unary_rpc_method_handler(
servicer.GetSource,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GetSourceRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.SerializeToString,
),
'GroupAssets': grpc.unary_unary_rpc_method_handler(
servicer.GroupAssets,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupAssetsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupAssetsResponse.SerializeToString,
),
'GroupFindings': grpc.unary_unary_rpc_method_handler(
servicer.GroupFindings,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupFindingsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupFindingsResponse.SerializeToString,
),
'ListAssets': grpc.unary_unary_rpc_method_handler(
servicer.ListAssets,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListAssetsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListAssetsResponse.SerializeToString,
),
'ListFindings': grpc.unary_unary_rpc_method_handler(
servicer.ListFindings,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListFindingsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListFindingsResponse.SerializeToString,
),
'ListSources': grpc.unary_unary_rpc_method_handler(
servicer.ListSources,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListSourcesRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListSourcesResponse.SerializeToString,
),
'RunAssetDiscovery': grpc.unary_unary_rpc_method_handler(
servicer.RunAssetDiscovery,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.RunAssetDiscoveryRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
'SetFindingState': grpc.unary_unary_rpc_method_handler(
servicer.SetFindingState,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.SetFindingStateRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.SerializeToString,
),
'SetIamPolicy': grpc.unary_unary_rpc_method_handler(
servicer.SetIamPolicy,
request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString,
response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
),
'TestIamPermissions': grpc.unary_unary_rpc_method_handler(
servicer.TestIamPermissions,
request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString,
response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString,
),
'UpdateFinding': grpc.unary_unary_rpc_method_handler(
servicer.UpdateFinding,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateFindingRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.SerializeToString,
),
'UpdateOrganizationSettings': grpc.unary_unary_rpc_method_handler(
servicer.UpdateOrganizationSettings,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateOrganizationSettingsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2.OrganizationSettings.SerializeToString,
),
'UpdateSource': grpc.unary_unary_rpc_method_handler(
servicer.UpdateSource,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateSourceRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.SerializeToString,
),
'UpdateSecurityMarks': grpc.unary_unary_rpc_method_handler(
servicer.UpdateSecurityMarks,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateSecurityMarksRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_security__marks__pb2.SecurityMarks.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.cloud.securitycenter.v1beta1.SecurityCenter', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
| apache-2.0 | 6,607,153,752,943,887,000 | 58.980392 | 171 | 0.755009 | false |
naucoin/VTKSlicerWidgets | Utilities/mrmpi/examples/rmat.py | 11 | 4924 | #!/usr/local/bin/python
# ----------------------------------------------------------------------
# MR-MPI = MapReduce-MPI library
# http://www.cs.sandia.gov/~sjplimp/mapreduce.html
# Steve Plimpton, [email protected], Sandia National Laboratories
#
# Copyright (2009) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
# certain rights in this software. This software is distributed under
# the modified Berkeley Software Distribution (BSD) License.
#
# See the README file in the top-level MapReduce directory.
# -------------------------------------------------------------------------
# MapReduce random RMAT matrix generation example in C++
# Syntax: rmat.py N Nz a b c d frac seed {outfile}
# 2^N = # of rows in RMAT matrix
# Nz = non-zeroes per row
# a,b,c,d = RMAT params (must sum to 1.0)
# frac = RMAT randomization param (frac < 1, 0 = no randomization)
# seed = RNG seed (positive int)
# outfile = output RMAT matrix to this filename (optional)
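# Example invocation (illustrative only; the numeric values are arbitrary):
#   mpirun -np 4 python rmat.py 16 8 0.57 0.19 0.19 0.05 0.0 12345 rmat.out
# generates a 2^16-row matrix with ~8 nonzeroes per row using the classic
# RMAT parameters a=0.57, b=c=0.19, d=0.05 and no randomization.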
import sys, random
from mrmpi import mrmpi
try:
import pypar
except:
import pypar_serial as pypar
# generate RMAT matrix entries
# emit one KV per edge: key = edge, value = NULL
def generate(itask,mr):
for m in xrange(ngenerate):
delta = order / 2
a1 = a; b1 = b; c1 = c; d1 = d
i = j = 0
for ilevel in xrange(nlevels):
rn = random.random()
if rn < a1:
pass
elif rn < a1+b1:
j += delta
elif rn < a1+b1+c1:
i += delta
else:
i += delta
j += delta
delta /= 2
      if fraction > 0.0:
        # note: the C original uses drand48(), which does not exist in
        # Python -- use random.random() from the module imported above
        a1 += a1*fraction * (random.random() - 0.5)
        b1 += b1*fraction * (random.random() - 0.5)
        c1 += c1*fraction * (random.random() - 0.5)
        d1 += d1*fraction * (random.random() - 0.5)
total = a1+b1+c1+d1
a1 /= total
b1 /= total
c1 /= total
d1 /= total
mr.add((i,j),None)
# eliminate duplicate edges
# input: one KMV per edge, MV has multiple entries if duplicates exist
# output: one KV per edge: key = edge, value = NULL
def cull(key,mvalue,mr):
mr.add(key,None)
# write edges to a file unique to this processor
def output(key,mvalue,mr):
print >>fp,key[0]+1,key[1]+1,1
# enumerate nonzeroes in each row
# input: one KMV per edge
# output: one KV per edge: key = row I, value = NULL
def nonzero(key,mvalue,mr):
mr.add(key[0],None)
# count nonzeroes in each row
# input: one KMV per row, MV has entry for each nonzero
# output: one KV: key = # of nonzeroes, value = NULL
def degree(key,mvalue,mr):
  mr.add(len(mvalue), None)
# count rows with same # of nonzeroes
# input: one KMV per nonzero count, MV has entry for each row
# output: one KV: key = # of nonzeroes, value = # of rows
def histo(key,mvalue,mr):
mr.add(key,len(mvalue))
# compare two counts
# order values by count, largest first
def ncompare(one,two):
  if one > two: return -1
  elif one < two: return 1
  else: return 0
# print # of rows with a specific # of nonzeroes
def stats(itask,key,value,mr):
global total
  total += value
print "%d rows with %d nonzeroes" % (value,key)
# main program
nprocs = pypar.size()
me = pypar.rank()
if len(sys.argv) != 9 and len(sys.argv) != 10:
if me == 0: print "Syntax: N Nz a b c d frac seed {outfile}"
sys.exit()
nlevels = int(sys.argv[1])
nnonzero = int(sys.argv[2])
a = float(sys.argv[3])
b = float(sys.argv[4])
c = float(sys.argv[5])
d = float(sys.argv[6])
fraction = float(sys.argv[7])
seed = int(sys.argv[8])
if len(sys.argv) == 10: outfile = sys.argv[9]
else: outfile = None
if a+b+c+d != 1.0:
if me == 0: print "ERROR: a,b,c,d must sum to 1"
sys.exit()
if fraction >= 1.0:
if me == 0: print "ERROR: fraction must be < 1"
sys.exit()
random.seed(seed+me)
order = 1 << nlevels
mr = mrmpi()
# loop until desired number of unique nonzero entries
pypar.barrier()
tstart = pypar.time()
niterate = 0
ntotal = (1 << nlevels) * nnonzero
nremain = ntotal
while nremain:
niterate += 1
ngenerate = nremain/nprocs
if me < nremain % nprocs: ngenerate += 1
mr.map(nprocs,generate,None,1)
nunique = mr.collate()
if nunique == ntotal: break
mr.reduce(cull)
nremain = ntotal - nunique
pypar.barrier()
tstop = pypar.time()
# output matrix if requested
if outfile:
fp = open(outfile + "." + str(me),"w")
if not fp:
print "ERROR: Could not open output file"
sys.exit()
mr2 = mr.copy()
mr2.reduce(output)
fp.close()
mr2.destroy()
# stats to screen
# include stats on number of nonzeroes per row
if me == 0:
print order,"rows in matrix"
print ntotal,"nonzeroes in matrix"
mr.reduce(nonzero)
mr.collate()
mr.reduce(degree)
mr.collate()
mr.reduce(histo)
mr.gather(1)
mr.sort_keys(ncompare)
total = 0
mr.map_kv(mr,stats)
if me == 0: print order-total,"rows with 0 nonzeroes"
if me == 0:
print "%g secs to generate matrix on %d procs in %d iterations" % \
(tstop-tstart,nprocs,niterate)
mr.destroy()
pypar.finalize()
| bsd-3-clause | 5,655,260,681,226,999,000 | 23.019512 | 75 | 0.637287 | false |
xsixing/blaze | samples/basics/array_evaluation.py | 10 | 1121 | """
Sample script showing the way to perform computations in blaze
This should be executable and result in an out of core execution to
generate the result of the expression
This illustrates the idea of:
- Using large in-disk arrays as operands
- Building expressions to evaluate in blaze
- Evaluate those expressions to produce a result
- Showing that we can configure how we evaluate expressions
- Showing how we can specify the kind of result we desire
"""
from __future__ import absolute_import, division, print_function
import sys
import blaze
def generate_operand(uri):
"""build some operands on disk"""
pass
def evaluation(operand_dict):
a = blaze.load(operand_dict['a'])
b = blaze.load(operand_dict['b'])
expr = (a+b)*(a*b)
print(type(expr)) # would this be "blaze.array"?
print(type(expr._data)) # would this be blaze.BlazeFuncDataDescriptor?
print(expr) # what should this print???
c = blaze.eval(expr, out_caps={}, hints={})
print(c) #should print the result... rejoice!
def main(argv):
pass
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause | 7,816,824,525,097,354,000 | 23.911111 | 74 | 0.688671 | false |
chenc10/Spark-PAF | dist/ec2/lib/boto-2.34.0/boto/ecs/__init__.py | 153 | 4177 | # Copyright (c) 2010 Chris Moyer http://coredumped.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto
from boto.connection import AWSQueryConnection, AWSAuthConnection
from boto.exception import BotoServerError
import time
import urllib
import xml.sax
from boto.ecs.item import ItemSet
from boto import handler
class ECSConnection(AWSQueryConnection):
"""
ECommerce Connection
For more information on how to use this module see:
http://blog.coredumped.org/2010/09/search-for-books-on-amazon-using-boto.html
"""
APIVersion = '2010-11-01'
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, host='ecs.amazonaws.com',
debug=0, https_connection_factory=None, path='/',
security_token=None, profile_name=None):
super(ECSConnection, self).__init__(aws_access_key_id, aws_secret_access_key,
is_secure, port, proxy, proxy_port, proxy_user, proxy_pass,
host, debug, https_connection_factory, path,
security_token=security_token,
profile_name=profile_name)
def _required_auth_capability(self):
return ['ecs']
def get_response(self, action, params, page=0, itemSet=None):
"""
Utility method to handle calls to ECS and parsing of responses.
"""
params['Service'] = "AWSECommerceService"
params['Operation'] = action
if page:
params['ItemPage'] = page
response = self.make_request(None, params, "/onca/xml")
body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status != 200:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
raise BotoServerError(response.status, response.reason, body)
if itemSet is None:
rs = ItemSet(self, action, params, page)
else:
rs = itemSet
h = handler.XmlHandler(rs, self)
xml.sax.parseString(body.encode('utf-8'), h)
if not rs.is_valid:
raise BotoServerError(response.status, '{Code}: {Message}'.format(**rs.errors[0]))
return rs
#
# Group methods
#
def item_search(self, search_index, **params):
"""
Returns items that satisfy the search criteria, including one or more search
indices.
For a full list of search terms,
:see: http://docs.amazonwebservices.com/AWSECommerceService/2010-09-01/DG/index.html?ItemSearch.html
"""
params['SearchIndex'] = search_index
return self.get_response('ItemSearch', params)
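    # Illustrative usage (not part of the original source; the credentials,
    # search index and response group below are assumptions):
    #
    #     conn = ECSConnection(aws_access_key_id='...',
    #                          aws_secret_access_key='...')
    #     items = conn.item_search('Books', Keywords='python',
    #                              ResponseGroup='Small')
    #     for item in items:
    #         print item.Title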
def item_lookup(self, **params):
"""
Returns items that satisfy the lookup query.
For a full list of parameters, see:
http://s3.amazonaws.com/awsdocs/Associates/2011-08-01/prod-adv-api-dg-2011-08-01.pdf
"""
return self.get_response('ItemLookup', params) | apache-2.0 | 1,580,100,225,348,895,000 | 38.790476 | 108 | 0.647594 | false |
mrkulk/text-world | evennia/server/oob_cmds.py | 2 | 14608 | """
Out-of-band default plugin commands available for OOB handler.
This module implements commands as defined by the MSDP standard
(http://tintin.sourceforge.net/msdp/), but is independent of the
actual transfer protocol (webclient, MSDP, GMCP etc). It also
implements several OOB commands unique to Evennia (both some
external and some for testing)
The available OOB commands can be extended by changing
`settings.OOB_PLUGIN_MODULES`
This module must contain a global dictionary CMD_MAP. This is a
dictionary that maps the call available in the OOB call to a function
in this module (this allows you to map multiple oob cmdnames to a
single actual Python function, for example).
For example, if the OOB strings received looks like this:
    MSDP.LISTEN [desc, key] # GMCP (wrapping to MSDP)
LISTEN ARRAY VAL desc VAL key # MSDP
and CMD_MAP = {"LISTEN", listen} then this would result in a call to a
function "listen" in this module, with the arguments *("desc", "key").
oob functions have the following call signature:
function(session, *args, **kwargs)
where session is the active session and *args, **kwargs are extra
arguments sent with the oob command.
A function mapped to the key "oob_error" will retrieve error strings
if it is defined. It will get the error message as its 1st argument.
oob_error(session, error, *args, **kwargs)
This allows for customizing error handling.
Data is usually returned to the user via a return OOB call:
session.msg(oob=(oobcmdname, (args,), {kwargs}))
Oobcmdnames (like "MSDP.LISTEN" / "LISTEN" above) are case-sensitive.
Note that args, kwargs must be iterable. Non-iterables will be
interpreted as a new command name (you can send multiple oob commands
with one msg() call)
Evennia introduces two internal extensions to MSDP: the MSDP_ARRAY
and MSDP_TABLE commands. These are never sent across the
wire to the client (so this is fully compliant with the MSDP
protocol), but tells the Evennia OOB Protocol that you want to send a
"bare" array or table to the client, without prepending any command
name.
"""
from django.conf import settings
from evennia.utils.utils import to_str
from evennia.server.oobhandler import OOB_HANDLER
_GA = object.__getattribute__
_SA = object.__setattr__
_NA = lambda o: "N/A"
#------------------------------------------------------------
# All OOB commands must be on the form
# cmdname(oobhandler, session, *args, **kwargs)
#------------------------------------------------------------
#
# General OOB commands
#
def oob_error(session, errmsg, *args, **kwargs):
"""
Error handling method. Error messages are relayed here.
Args:
session (Session): The session to receive the error
errmsg (str): The failure message
A function with this name is special and is also called by the
oobhandler when an error occurs already at the execution stage
(such as the oob function not being recognized or having the wrong
args etc). Call this from other oob functions to centralize error
management.
"""
session.msg(oob=("error", ("OOB ERROR: %s" % errmsg,)))
def oob_echo(session, *args, **kwargs):
"""
Test echo function. Echoes args, kwargs sent to it.
Args:
session (Session): The Session to receive the echo.
args (list of str): Echo text.
kwargs (dict of str, optional): Keyed echo text
"""
session.msg(oob=("echo", args, kwargs))
##OOB{"repeat":10}
def oob_repeat(session, oobfuncname, interval, *args, **kwargs):
"""
Called as REPEAT <oobfunc> <interval> <args>
Repeats a given OOB command with a certain frequency.
Args:
session (Session): Session creating the repeat
oobfuncname (str): OOB function called every interval seconds
interval (int): Interval of repeat, in seconds.
Notes:
The command checks so that it cannot repeat itself.
"""
if not oobfuncname:
oob_error(session, "Usage: REPEAT <oobfuncname>, <interval>")
return
# limit repeat actions to minimum 5 seconds interval
interval = 20 if not interval else (max(5, interval))
obj = session.get_puppet_or_player()
if obj and oobfuncname != "REPEAT":
OOB_HANDLER.add_repeater(obj, session.sessid, oobfuncname, interval, *args, **kwargs)
##OOB{"UNREPEAT":10}
def oob_unrepeat(session, oobfuncname, interval):
"""
Called with UNREPEAT <oobfunc> <interval>
Disable repeating callback.
Args:
session (Session): Session controlling the repeater
oobfuncname (str): OOB function called every interval seconds
interval (int): Interval of repeater, in seconds.
Notes:
The command checks so that it cannot repeat itself.
"""
obj = session.get_puppet_or_player()
if obj:
OOB_HANDLER.remove_repeater(obj, session.sessid, oobfuncname, interval)
#
# MSDP protocol standard commands
#
# MSDP suggests the following standard name conventions for making
# different properties available to the player
# "CHARACTER_NAME", "SERVER_ID", "SERVER_TIME", "AFFECTS", "ALIGNMENT", "EXPERIENCE", "EXPERIENCE_MAX", "EXPERIENCE_TNL",
# "HEALTH", "HEALTH_MAX", "LEVEL", "RACE", "CLASS", "MANA", "MANA_MAX", "WIMPY", "PRACTICE", "MONEY", "MOVEMENT",
# "MOVEMENT_MAX", "HITROLL", "DAMROLL", "AC", "STR", "INT", "WIS", "DEX", "CON", "OPPONENT_HEALTH", "OPPONENT_HEALTH_MAX",
# "OPPONENT_LEVEL", "OPPONENT_NAME", "AREA_NAME", "ROOM_EXITS", "ROOM_VNUM", "ROOM_NAME", "WORLD_TIME", "CLIENT_ID",
# "CLIENT_VERSION", "PLUGIN_ID", "ANSI_COLORS", "XTERM_256_COLORS", "UTF_8", "SOUND", "MXP", "BUTTON_1", "BUTTON_2",
# "BUTTON_3", "BUTTON_4", "BUTTON_5", "GAUGE_1", "GAUGE_2","GAUGE_3", "GAUGE_4", "GAUGE_5"
# mapping from MSDP standard names to Evennia variables
OOB_SENDABLE = {
"CHARACTER_NAME": lambda o: o.key,
"SERVER_ID": lambda o: settings.SERVERNAME,
"ROOM_NAME": lambda o: o.db_location.key,
"ANSI_COLORS": lambda o: True,
"XTERM_256_COLORS": lambda o: True,
"UTF_8": lambda o: True
}
##OOB{"SEND":"CHARACTER_NAME"} - from webclient
def oob_send(session, *args, **kwargs):
"""
Called with the SEND MSDP command.
This function directly returns the value of the given variable to
the session. It assumes the object on which the variable sits
belongs to the session.
Args:
session (Session): Session object
args (str): any number of properties to return. These
must belong to the OOB_SENDABLE dictionary.
Examples:
oob input: ("SEND", "CHARACTER_NAME", "SERVERNAME")
oob output: ("MSDP_TABLE", "CHARACTER_NAME", "Amanda",
"SERVERNAME", "Evennia")
"""
# mapping of MSDP name to a property
obj = session.get_puppet_or_player()
ret = {}
if obj:
for name in (a.upper() for a in args if a):
try:
#print "MSDP SEND inp:", name
value = OOB_SENDABLE.get(name, _NA)(obj)
ret[name] = value
            except Exception as e:
ret[name] = str(e)
# return, make sure to use the right case
session.msg(oob=("MSDP_TABLE", (), ret))
else:
oob_error(session, "You must log in first.")
# mapping standard MSDP keys to Evennia field names
OOB_REPORTABLE = {
"CHARACTER_NAME": "db_key",
"ROOM_NAME": "db_location",
"TEST" : "test"
}
##OOB{"REPORT":"TEST"}
def oob_report(session, *args, **kwargs):
"""
Called with the `REPORT PROPNAME` MSDP command.
Monitors the changes of given property name. Assumes reporting
happens on an object controlled by the session.
Args:
session (Session): The Session doing the monitoring. The
property is assumed to sit on the entity currently
controlled by the Session. If puppeting, this is an
Object, otherwise the object will be the Player the
Session belongs to.
args (str or list): One or more property names to monitor changes in.
If a name starts with `db_`, the property is assumed to
be a field, otherwise an Attribute of the given name will
be monitored (if it exists).
Notes:
When the property updates, the monitor will send a MSDP_ARRAY
to the session of the form `(SEND, fieldname, new_value)`
Examples:
("REPORT", "CHARACTER_NAME")
("MSDP_TABLE", "CHARACTER_NAME", "Amanda")
"""
obj = session.get_puppet_or_player()
if obj:
ret = []
for name in args:
propname = OOB_REPORTABLE.get(name, None)
if not propname:
oob_error(session, "No Reportable property '%s'. Use LIST REPORTABLE_VARIABLES." % propname)
# the field_monitors require an oob function as a callback when they report a change.
elif propname.startswith("db_"):
OOB_HANDLER.add_field_monitor(obj, session.sessid, propname, "return_field_report")
ret.append(to_str(_GA(obj, propname), force_string=True))
else:
OOB_HANDLER.add_attribute_monitor(obj, session.sessid, propname, "return_attribute_report")
ret.append(_GA(obj, "db_value"))
#print "ret:", ret
session.msg(oob=("MSDP_ARRAY", ret))
else:
oob_error(session, "You must log in first.")
def oob_return_field_report(session, fieldname, obj, *args, **kwargs):
"""
This is a helper command called by the monitor when fieldname
changes. It is not part of the official MSDP specification but is
a callback used by the monitor to format the result before sending
it on.
"""
session.msg(oob=("MSDP_TABLE", (),
{fieldname: to_str(getattr(obj, fieldname), force_string=True)}))
def oob_return_attribute_report(session, fieldname, obj, *args, **kwargs):
"""
This is a helper command called by the monitor when an Attribute
changes. We need to handle this a little differently from fields
since we are generally not interested in the field name (it's
always db_value for Attributes) but the Attribute's name.
This command is not part of the official MSDP specification but is
a callback used by the monitor to format the result before sending
it on.
"""
session.msg(oob=("MSDP_TABLE", (),
{obj.db_key: to_str(getattr(obj, fieldname), force_string=True)}))
##OOB{"UNREPORT": "TEST"}
def oob_unreport(session, *args, **kwargs):
"""
This removes tracking for the given data.
"""
obj = session.get_puppet_or_player()
if obj:
for name in (a.upper() for a in args if a):
propname = OOB_REPORTABLE.get(name, None)
if not propname:
oob_error(session, "No Un-Reportable property '%s'. Use LIST REPORTABLE_VARIABLES." % propname)
elif propname.startswith("db_"):
OOB_HANDLER.remove_field_monitor(obj, session.sessid, propname, "oob_return_field_report")
else: # assume attribute
OOB_HANDLER.remove_attribute_monitor(obj, session.sessid, propname, "oob_return_attribute_report")
else:
oob_error(session, "You must log in first.")
##OOB{"LIST":"COMMANDS"}
def oob_list(session, mode, *args, **kwargs):
"""
Called with the `LIST <MODE>` MSDP command.
Args:
session (Session): The Session asking for the information
mode (str): The available properties. One of
"COMMANDS" Request an array of commands supported
by the server.
"LISTS" Request an array of lists supported
by the server.
"CONFIGURABLE_VARIABLES" Request an array of variables the client
can configure.
"REPORTABLE_VARIABLES" Request an array of variables the server
will report.
"REPORTED_VARIABLES" Request an array of variables currently
being reported.
"SENDABLE_VARIABLES" Request an array of variables the server
will send.
Examples:
oob in: LIST COMMANDS
oob out: (COMMANDS, (SEND, REPORT, LIST, ...)
"""
mode = mode.upper()
if mode == "COMMANDS":
session.msg(oob=("COMMANDS", ("LIST",
"REPORT",
"UNREPORT",
# "RESET",
"SEND")))
elif mode == "REPORTABLE_VARIABLES":
session.msg(oob=("REPORTABLE_VARIABLES", tuple(key for key in OOB_REPORTABLE.keys())))
elif mode == "REPORTED_VARIABLES":
# we need to check so as to use the right return value depending on if it is
# an Attribute (identified by tracking the db_value field) or a normal database field
# reported is a list of tuples (obj, propname, args, kwargs)
reported = OOB_HANDLER.get_all_monitors(session.sessid)
reported = [rep[0].key if rep[1] == "db_value" else rep[1] for rep in reported]
session.msg(oob=("REPORTED_VARIABLES", reported))
elif mode == "SENDABLE_VARIABLES":
session.msg(oob=("SENDABLE_VARIABLES", tuple(key for key in OOB_REPORTABLE.keys())))
elif mode == "CONFIGURABLE_VARIABLES":
# Not implemented (game specific)
oob_error(session, "Not implemented (game specific)")
else:
# mode == "LISTS" or not given
session.msg(oob=("LISTS",("REPORTABLE_VARIABLES",
"REPORTED_VARIABLES",
# "CONFIGURABLE_VARIABLES",
"SENDABLE_VARIABLES")))
#
# Cmd mapping
#
# this maps the commands to the names available to use from
# the oob call. The standard MSDP commands are capitalized
# as per the protocol, Evennia's own commands are not.
CMD_MAP = {"oob_error": oob_error, # will get error messages
"return_field_report": oob_return_field_report,
"return_attribute_report": oob_return_attribute_report,
# MSDP
"REPEAT": oob_repeat,
"UNREPEAT": oob_unrepeat,
"SEND": oob_send,
"ECHO": oob_echo,
"REPORT": oob_report,
"UNREPORT": oob_unreport,
"LIST": oob_list,
# GMCP
}
| bsd-3-clause | -2,748,601,004,682,188,300 | 37.240838 | 122 | 0.622262 | false |
margguo/python-ivi | ivi/agilent/agilentDSA91204A.py | 7 | 1632 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2014 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent90000 import *
class agilentDSA91204A(agilent90000):
"Agilent Infiniium DSA91204A IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'DSO91204A')
super(agilentDSA91204A, self).__init__(*args, **kwargs)
self._analog_channel_count = 4
self._digital_channel_count = 0
self._channel_count = 4
self._bandwidth = 12e9
self._init_channels()
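# Illustrative usage (assumes the usual python-ivi conventions; the VISA
# resource string below is a placeholder, not a real address):
#
#     import ivi
#     scope = ivi.agilent.agilentDSA91204A("TCPIP0::192.168.1.100::INSTR")
#     scope.close()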
| mit | 298,840,500,654,370,900 | 36.090909 | 77 | 0.734681 | false |
qwhelan/asv | asv/results.py | 1 | 33074 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import base64
import os
import re
import zlib
import itertools
import hashlib
import datetime
import collections
import six
from six.moves import zip as izip
from . import environment
from .console import log
from .machine import Machine
from . import statistics
from . import util
def iter_results_paths(results):
"""
Iterate over all of the result file paths.
"""
skip_files = set([
'machine.json', 'benchmarks.json'
])
for root, dirs, files in os.walk(results):
# Iterate over files only if machine.json is valid json
machine_json = os.path.join(root, "machine.json")
try:
data = util.load_json(machine_json, api_version=Machine.api_version)
machine_name = data.get('machine')
if not isinstance(machine_name, six.text_type):
raise util.UserError("malformed {0}".format(machine_json))
except util.UserError as err:
machine_json_err = "Skipping results: {0}".format(six.text_type(err))
except IOError as err:
machine_json_err = "Skipping results: could not load {0}".format(
machine_json)
else:
machine_json_err = None
# Iterate over files
for filename in files:
if filename not in skip_files and filename.endswith('.json'):
if machine_json_err is not None:
# Show the warning only if there are some files to load
log.warning(machine_json_err)
break
yield (root, filename, machine_name)
def iter_results(results):
"""
Iterate over all of the result files.
"""
for (root, filename, machine_name) in iter_results_paths(results):
try:
yield Results.load(os.path.join(root, filename), machine_name=machine_name)
except util.UserError as exc:
log.warning(six.text_type(exc))
def iter_results_for_machine(results, machine_name):
"""
Iterate over all of the result files for a particular machine.
"""
return iter_results(os.path.join(results, machine_name))
def iter_results_for_machine_and_hash(results, machine_name, commit):
"""
Iterate over all of the result files with a given hash for a
particular machine.
"""
full_commit = get_result_hash_from_prefix(results, machine_name, commit)
for (root, filename, machine_name) in iter_results_paths(
os.path.join(results, machine_name)):
results_commit = filename.split('-')[0]
if results_commit == full_commit:
try:
yield Results.load(os.path.join(root, filename), machine_name=machine_name)
except util.UserError as exc:
log.warning(six.text_type(exc))
def iter_existing_hashes(results):
"""
Iterate over all of the result commit hashes and dates and yields
commit_hash.
May return duplicates. Use `get_existing_hashes` if that matters.
"""
for result in iter_results(results):
yield result.commit_hash
def get_existing_hashes(results):
"""
Get a list of the commit hashes that have already been tested.
"""
log.info("Getting existing hashes")
hashes = list(set(iter_existing_hashes(results)))
return hashes
def get_result_hash_from_prefix(results, machine_name, commit_prefix):
"""
Get the 8-char result commit identifier from a potentially shorter
prefix. Only considers the set of commits that have had
results computed.
Returns None if there are no matches. Raises a UserError
if the prefix is non-unique.
"""
commits = set([])
path = os.path.join(results, machine_name)
for (root, filename, r_machine_name) in iter_results_paths(path):
if r_machine_name != machine_name:
log.warning("Skipping results '{0}': machine name is not '{1}'".format(
os.path.join(root, filename), machine_name))
continue
results_commit = filename.split('-')[0]
cmp_len = min(len(commit_prefix), len(results_commit))
if results_commit[:cmp_len] == commit_prefix[:cmp_len]:
commits.add(results_commit)
if len(commits) > 1:
commit_list_str = ', '.join(sorted(commits))
raise util.UserError('Git hash prefix could represent one of ' +
'multiple commits: {0}'.format(commit_list_str))
elif len(commits) == 1:
return list(commits)[0]
else:
return None
def get_filename(machine, commit_hash, env_name):
"""
Get the result filename for a given machine, commit_hash and
environment.
If the environment name is too long, use its hash instead.
"""
if env_name and len(env_name) >= 128:
env_name = "env-" + hashlib.md5(env_name.encode('utf-8')).hexdigest()
return os.path.join(
machine,
"{0}-{1}.json".format(
commit_hash[:8],
env_name))
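# Illustrative example of get_filename (hypothetical values):
#   get_filename('tux', '0123456789abcdef', 'conda-py2.7')
# gives 'tux/01234567-conda-py2.7.json'; an env_name of 128 characters or
# more is first replaced by 'env-' plus its md5 hexdigest.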
def _compatible_results(result, result_params, params):
"""
For parameterized benchmarks, obtain values from *result* that
are compatible with parameters of *benchmark*
"""
if result is None:
# All results missing, eg. build failure
return [None for param in itertools.product(*params)]
# Pick results for those parameters that also appear in the
# current benchmark
old_results = {}
for param, value in izip(itertools.product(*result_params), result):
old_results[param] = value
new_results = []
for param in itertools.product(*params):
new_results.append(old_results.get(param))
return new_results
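# Illustrative example: with result=[1, 2] recorded for
# result_params=[['a', 'b']], and the benchmark now parameterized by
# params=[['b', 'c']], _compatible_results returns [2, None] -- the old 'b'
# value is kept and the new 'c' parameter has no old result.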
class Results(object):
"""
Manage a set of benchmark results for a single machine and commit
hash.
"""
api_version = 2
def __init__(self,
params,
requirements,
commit_hash,
date,
python,
env_name,
env_vars):
"""
Parameters
----------
params : dict
Parameters describing the environment in which the
benchmarks were run.
requirements : list
Requirements of the benchmarks being run.
commit_hash : str
The commit hash for the benchmark run.
date : int
JavaScript timestamp for when the commit was merged into
the repository.
python : str
A Python version specifier.
env_name : str
Environment name
env_vars: dict
Environment variables
"""
self._params = params
self._requirements = requirements
self._commit_hash = commit_hash
self._date = date
self._results = {}
self._samples = {}
self._stats = {}
self._benchmark_params = {}
self._profiles = {}
self._python = python
self._env_name = env_name
self._started_at = {}
self._duration = {}
self._benchmark_version = {}
self._env_vars = env_vars
# Note: stderr and errcode are not saved to files
self._stderr = {}
self._errcode = {}
if commit_hash is not None:
self._filename = get_filename(
params['machine'], self._commit_hash, env_name)
else:
self._filename = None
@classmethod
def unnamed(cls):
return cls({}, {}, None, None, None, None, {})
@property
def commit_hash(self):
return self._commit_hash
@property
def date(self):
return self._date
@property
def params(self):
return self._params
@property
def env_vars(self):
return self._env_vars
@property
def started_at(self):
return self._started_at
@property
def duration(self):
return self._duration
def set_build_duration(self, value):
self._duration["<build>"] = float(value)
def set_setup_cache_duration(self, setup_cache_key, value):
self._duration["<setup_cache {}>".format(setup_cache_key)] = float(value)
@property
def benchmark_version(self):
return self._benchmark_version
@property
def stderr(self):
return self._stderr
@property
def errcode(self):
return self._errcode
def get_all_result_keys(self):
"""
Return all available result keys.
"""
return six.iterkeys(self._results)
def get_result_keys(self, benchmarks):
"""
Return result keys corresponding to benchmarks.
Parameters
----------
benchmarks : Benchmarks
Benchmarks to return results for.
Used for checking benchmark versions.
Returns
-------
keys : set
Set of benchmark result keys
"""
keys = set()
for key in six.iterkeys(self._results):
if key not in benchmarks:
continue
version = self._benchmark_version.get(key)
bench_version = benchmarks[key].get('version')
if version is not None and version != bench_version:
continue
keys.add(key)
return keys
def get_result_value(self, key, params):
"""
Return the value of benchmark result.
Parameters
----------
key : str
Benchmark name to return results for
params : {list of list, None}
Set of benchmark parameters to return values for
Returns
-------
value : {float, list of float}
Benchmark result value. If the benchmark is parameterized, return
a list of values.
"""
return _compatible_results(self._results[key],
self._benchmark_params[key],
params)
def get_result_stats(self, key, params):
"""
Return the statistical information of a benchmark result.
Parameters
----------
key : str
Benchmark name to return results for
params : {list of list, None}
Set of benchmark parameters to return values for
Returns
-------
stats : {None, dict, list of dict}
Result statistics. If the benchmark is parameterized,
return a list of values.
"""
return _compatible_results(self._stats[key],
self._benchmark_params[key],
params)
def get_result_samples(self, key, params):
"""
Return the raw data points of a benchmark result.
Parameters
----------
key : str
Benchmark name to return results for
params : {list of list, None}
Set of benchmark parameters to return values for
Returns
-------
samples : {None, list}
Raw result samples. If the benchmark is parameterized,
return a list of values.
"""
return _compatible_results(self._samples[key],
self._benchmark_params[key],
params)
def get_result_params(self, key):
"""
Return the benchmark parameters of the given result
"""
return self._benchmark_params[key]
def remove_result(self, key):
"""
Remove results corresponding to a given benchmark.
"""
del self._results[key]
del self._benchmark_params[key]
del self._samples[key]
del self._stats[key]
# Remove profiles (may be missing)
self._profiles.pop(key, None)
# Remove run times (may be missing in old files)
self._started_at.pop(key, None)
self._duration.pop(key, None)
# Remove version (may be missing)
self._benchmark_version.pop(key, None)
def remove_samples(self, key, selected_idx=None):
"""
Remove measurement samples from the selected benchmark.
"""
if key not in self._results:
raise ValueError(key)
if selected_idx is None:
self._samples[key] = None
elif self._samples[key] is not None:
for j in selected_idx:
self._samples[key][j] = None
def add_result(self, benchmark, result,
started_at=None, duration=None,
record_samples=False,
append_samples=False,
selected_idx=None):
"""
Add benchmark result.
Parameters
----------
benchmark : dict
Benchmark object
result : runner.BenchmarkResult
Result of the benchmark.
started_at : datetime.datetime, optional
Benchmark start time.
duration : float, optional
Benchmark total duration in seconds.
record_samples : bool, optional
Whether to save samples.
append_samples : bool, optional
Whether to combine new samples with old.
selected_idx : set, optional
Which indices in a parametrized benchmark to update
"""
new_result = list(result.result)
new_samples = list(result.samples)
new_number = result.number
benchmark_name = benchmark['name']
benchmark_version = benchmark['version']
if started_at is None:
started_at = datetime.datetime.utcnow()
new_stats = [None] * len(new_result)
if (benchmark_name in self._results and
benchmark_version == self._benchmark_version.get(benchmark_name)):
# Append to old samples, if requested
if append_samples:
old_samples = self.get_result_samples(benchmark_name, benchmark['params'])
for j in range(len(new_samples)):
if old_samples[j] is not None and new_samples[j] is not None:
new_samples[j] = old_samples[j] + new_samples[j]
# Retain old result where requested
merge_idx = [j for j in range(len(new_result))
if selected_idx is not None and j not in selected_idx]
if merge_idx:
old_result = self.get_result_value(benchmark_name, benchmark['params'])
old_samples = self.get_result_samples(benchmark_name, benchmark['params'])
old_stats = self.get_result_stats(benchmark_name, benchmark['params'])
for j in merge_idx:
new_result[j] = old_result[j]
new_samples[j] = old_samples[j]
new_stats[j] = old_stats[j]
# Recompute stats for updated entries (and drop unnecessary data)
for j, (r, s, n) in enumerate(zip(new_result, new_samples, new_number)):
if util.is_na(r):
new_samples[j] = None
new_stats[j] = None
continue
if n is not None:
new_result[j], new_stats[j] = statistics.compute_stats(s, n)
# Compress None lists to just None
if all(x is None for x in new_result):
new_result = None
if all(x is None for x in new_samples):
new_samples = None
if all(x is None for x in new_stats):
new_stats = None
# Drop samples if requested
if not record_samples:
new_samples = None
# Store result
self._results[benchmark_name] = new_result
self._stats[benchmark_name] = new_stats
self._samples[benchmark_name] = new_samples
self._benchmark_params[benchmark_name] = benchmark['params'] if benchmark['params'] else []
self._started_at[benchmark_name] = util.datetime_to_js_timestamp(started_at)
if duration is None:
self._duration.pop(benchmark_name, None)
else:
self._duration[benchmark_name] = float(duration)
self._benchmark_version[benchmark_name] = benchmark_version
self._stderr[benchmark_name] = result.stderr
self._errcode[benchmark_name] = result.errcode
if result.profile:
profile_data = base64.b64encode(zlib.compress(result.profile))
if sys.version_info[0] >= 3:
profile_data = profile_data.decode('ascii')
self._profiles[benchmark_name] = profile_data
def get_profile(self, benchmark_name):
"""
Get the profile data for the given benchmark name.
Parameters
----------
benchmark_name : str
Name of benchmark
Returns
-------
profile_data : bytes
Raw profile data
"""
profile_data = self._profiles[benchmark_name]
if sys.version_info[0] >= 3:
profile_data = profile_data.encode('ascii')
return zlib.decompress(base64.b64decode(profile_data))
def has_profile(self, benchmark_name):
"""
Does the given benchmark data have profiling information?
"""
return benchmark_name in self._profiles
def save(self, result_dir):
"""
Save the results to disk, replacing existing results.
Parameters
----------
result_dir : str
Path to root of results tree.
"""
if self._filename is None:
raise ValueError("Cannot save unnamed Results")
path = os.path.join(result_dir, self._filename)
results = {}
simple_dict = {
'result': self._results,
'params': self._benchmark_params,
'version': self._benchmark_version,
'started_at': self._started_at,
'duration': self._duration,
'samples': self._samples,
'profile': self._profiles,
}
all_keys = ['result', 'params', 'version', 'started_at', 'duration',
'stats_ci_99_a', 'stats_ci_99_b', 'stats_q_25', 'stats_q_75',
'stats_number', 'stats_repeat', 'samples', 'profile']
for name in six.iterkeys(self._results):
row = []
for key in all_keys:
if key in simple_dict:
value = simple_dict[key].get(name)
else:
assert key[:6] == 'stats_'
z = self._stats[name]
if z is None:
value = None
else:
value = [x.get(key[6:]) if x is not None else None
for x in z]
if key != 'params':
if isinstance(value, list) and all(x is None for x in value):
value = None
if key.startswith('stats_') or key == 'duration':
value = util.truncate_float_list(value)
row.append(value)
while row and row[-1] is None:
row.pop()
results[name] = row
other_durations = {}
for key, value in six.iteritems(self._duration):
if key.startswith('<'):
other_durations[key] = value
data = {
'commit_hash': self._commit_hash,
'env_name': self._env_name,
'date': self._date,
'params': self._params,
'python': self._python,
'requirements': self._requirements,
'env_vars': self._env_vars,
'result_columns': all_keys,
'results': results,
'durations': other_durations,
}
util.write_json(path, data, self.api_version, compact=True)
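    # Illustrative on-disk shape written above (api_version 2, values
    # hypothetical): a non-parameterized benchmark row looks like
    #   "results": {"suite.time_foo": [[1.2e-05], [], "abcdef", 1500000000000]}
    # with the row entries ordered by "result_columns" and trailing None
    # entries dropped.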
def load_data(self, result_dir):
"""
Load previous results for the current parameters (if any).
"""
if self._filename is None:
raise ValueError("Cannot load unnamed Results")
path = os.path.join(result_dir, self._filename)
if os.path.isfile(path):
old = self.load(path)
for dict_name in ('_results', '_samples', '_stats', '_env_vars',
'_benchmark_params', '_profiles', '_started_at',
'_duration', '_benchmark_version'):
setattr(self, dict_name, getattr(old, dict_name))
@classmethod
def load(cls, path, machine_name=None):
"""
Load results from disk.
Parameters
----------
path : str
Path to results file.
machine_name : str, optional
If given, check that the results file is for the given machine.
"""
d = util.load_json(path, cls.api_version)
d.setdefault('env_vars', {})
try:
obj = cls(
d['params'],
d['requirements'],
d['commit_hash'],
d['date'],
d['python'],
d['env_name'],
d['env_vars'],
)
obj._results = {}
obj._samples = {}
obj._stats = {}
obj._benchmark_params = {}
obj._profiles = {}
obj._started_at = {}
obj._duration = d.get('durations', {})
obj._benchmark_version = {}
simple_keys = {
'result': obj._results,
'params': obj._benchmark_params,
'version': obj._benchmark_version,
'started_at': obj._started_at,
'duration': obj._duration,
'samples': obj._samples,
'profile': obj._profiles,
}
for name, key_values in six.iteritems(d['results']):
for key, value in zip(d['result_columns'], key_values):
key_dict = simple_keys.get(key)
if key_dict is not None:
key_dict[name] = value
continue
elif key.startswith('stats_'):
if value is not None:
if name not in obj._stats:
                                obj._stats[name] = [{} for _ in value]  # one dict per entry; [{}]*n would alias a single shared dict
stats_key = key[6:]
for j, v in enumerate(value):
if v is not None:
obj._stats[name][j][stats_key] = v
else:
raise KeyError("unknown data key {}".format(key))
for key_dict in simple_keys.values():
key_dict.setdefault(name, None)
obj._stats.setdefault(name, None)
obj._filename = os.path.join(*path.split(os.path.sep)[-2:])
except KeyError as exc:
raise util.UserError(
"Error loading results file '{0}': missing key {1}".format(
path, six.text_type(exc)))
if machine_name is not None and obj.params.get('machine') != machine_name:
raise util.UserError(
"Error loading results file '{0}': machine name is not '{1}'".format(
path, machine_name))
return obj
def rm(self, result_dir):
if self._filename is None:
raise ValueError("Cannot remove unnamed Results")
path = os.path.join(result_dir, self._filename)
os.remove(path)
@classmethod
def update(cls, path):
util.update_json(cls, path, cls.api_version, compact=True)
@property
def env_name(self):
return self._env_name
#
# Old data format support
#
@classmethod
def update_to_2(cls, d):
"""
Reformat data in api_version 1 format to version 2.
"""
try:
d2 = {}
d2['commit_hash'] = d['commit_hash']
d2['date'] = d['date']
d2['env_name'] = d.get('env_name',
environment.get_env_name('',
d['python'],
d['requirements'],
{}))
d2['params'] = d['params']
d2['python'] = d['python']
d2['requirements'] = d['requirements']
d2['env_vars'] = d.get('env_vars', {})
# Backward-compatible load
results = {}
samples = {}
stats = {}
benchmark_params = {}
for key, value in six.iteritems(d['results']):
# Backward compatibility
if not isinstance(value, dict):
value = {'result': [value], 'samples': None,
'stats': None, 'params': []}
if not isinstance(value['result'], list):
value['result'] = [value['result']]
if 'stats' in value and not isinstance(value['stats'], list):
value['stats'] = [value['stats']]
value.setdefault('samples', None)
value.setdefault('stats', None)
value.setdefault('params', [])
# Assign results
results[key] = value['result']
samples[key] = value['samples']
stats[key] = value['stats']
benchmark_params[key] = value['params']
if 'profiles' in d:
profiles = d['profiles']
else:
profiles = {}
started_at = d.get('started_at', {})
duration = d.get('duration', {})
benchmark_version = d.get('benchmark_version', {})
# Convert to new format
getters = [
('result', results, None),
('params', benchmark_params, None),
('version', benchmark_version, None),
('started_at', started_at, None),
('duration', duration, None),
('stats_ci_99_a', stats, lambda z: z['ci_99'][0]),
('stats_ci_99_b', stats, lambda z: z['ci_99'][1]),
('stats_q_25', stats, lambda z: z.get('q_25')),
('stats_q_75', stats, lambda z: z.get('q_75')),
('stats_number', stats, lambda z: z.get('number')),
('stats_repeat', stats, lambda z: z.get('repeat')),
('samples', samples, None),
('profile', profiles, None),
]
names = set()
for key_dict in (results, benchmark_params):
names.update(key_dict.keys())
d2['result_columns'] = [x[0] for x in getters]
d2['results'] = {}
for name in sorted(names):
r = []
for key_name, key_dict, key_getter in getters:
value = key_dict.get(name)
if key_getter is not None and value is not None:
if isinstance(value, list):
value = [key_getter(z) if z is not None else None
for z in value]
else:
value = key_getter(value)
if key_name.startswith('stats_') or key_name == 'duration':
value = util.truncate_float_list(value)
if key_name == 'params' and value is None:
value = []
if key_name != 'params' and isinstance(value, list):
if all(x is None for x in value):
value = None
r.append(value)
while r and r[-1] is None:
r.pop()
d2['results'][name] = r
d2['durations'] = {}
for key, value in six.iteritems(duration):
if key.startswith('<'):
d2['durations'][key] = value
return d2
except KeyError as exc:
raise util.UserError(
"Error loading results data: missing key {}".format(
six.text_type(exc)))
def format_benchmark_result(results, benchmark):
"""
Pretty-print a benchmark result to human-readable form.
Parameters
----------
results : Results
Result set object
benchmark : dict
Benchmark dictionary
Returns
-------
info : {str, None}
One-line description of results
details : {str, None}
Additional details
"""
name = benchmark['name']
result = results.get_result_value(name, benchmark['params'])
stats = results.get_result_stats(name, benchmark['params'])
total_count = len(result)
failure_count = sum(r is None for r in result)
info = None
details = None
# Display status
if failure_count > 0:
if failure_count == total_count:
info = "failed"
else:
info = "{0}/{1} failed".format(failure_count, total_count)
# Display results
if benchmark['params']:
# Long format display
if failure_count == 0:
info = "ok"
display_result = [(v, statistics.get_err(v, s) if s is not None else None)
for v, s in zip(result, stats)]
display = _format_benchmark_result(display_result, benchmark)
display = "\n".join(display).strip()
details = display
else:
if failure_count == 0:
# Failure already shown above
if not result:
display = "[]"
else:
if stats[0]:
err = statistics.get_err(result[0], stats[0])
else:
err = None
display = util.human_value(result[0], benchmark['unit'], err=err)
if len(result) > 1:
display += ";..."
info = display
return info, details
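# Illustrative outputs of format_benchmark_result: if every parameter
# combination failed it returns ("failed", None); a plain successful
# benchmark returns its formatted value, e.g. ("1.23s", None); a successful
# parameterized benchmark returns ("ok", <ASCII table of per-parameter
# results>).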
def _format_benchmark_result(result, benchmark, max_width=None):
"""
Format the result from a parameterized benchmark as an ASCII table
"""
if not result:
return ['[]']
def do_formatting(num_column_params):
# Fold result to a table
if num_column_params > 0:
column_params = benchmark['params'][-num_column_params:]
else:
column_params = []
rows = []
if column_params:
row_params = benchmark['params'][:-len(column_params)]
header = benchmark['param_names'][:len(row_params)]
column_param_permutations = list(itertools.product(*column_params))
header += [" / ".join(_format_param_value(value) for value in values)
for values in column_param_permutations]
rows.append(header)
column_items = len(column_param_permutations)
name_header = " / ".join(benchmark['param_names'][len(row_params):])
else:
column_items = 1
row_params = benchmark['params']
name_header = ""
header = benchmark['param_names']
rows.append(header)
for j, values in enumerate(itertools.product(*row_params)):
row_results = [util.human_value(x[0], benchmark['unit'], err=x[1])
for x in result[j*column_items:(j+1)*column_items]]
row = [_format_param_value(value) for value in values] + row_results
rows.append(row)
if name_header:
display = util.format_text_table(rows, 1,
top_header_text=name_header,
top_header_span_start=len(row_params))
else:
display = util.format_text_table(rows, 1)
return display.splitlines()
# Determine how many parameters can be fit to columns
if max_width is None:
max_width = util.get_terminal_width() * 3//4
text = do_formatting(0)
for j in range(1, len(benchmark['params'])):
new_text = do_formatting(j)
width = max(len(line) for line in new_text)
if width < max_width:
text = new_text
else:
break
return text
def _format_param_value(value_repr):
"""
Format a parameter value for displaying it as test output. The
values are string obtained via Python repr.
"""
regexs = ["^'(.+)'$",
"^u'(.+)'$",
"^<class '(.+)'>$"]
for regex in regexs:
m = re.match(regex, value_repr)
if m and m.group(1).strip():
return m.group(1)
return value_repr
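# Illustrative behavior: _format_param_value("'foo'") and
# _format_param_value("u'foo'") both give 'foo',
# _format_param_value("<class 'int'>") gives 'int', and a repr matching none
# of the patterns (e.g. '1.5') is returned unchanged.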
| bsd-3-clause | -4,283,705,882,210,736,000 | 30.893925 | 99 | 0.52316 | false |
killbill/killbill-client-python | killbill/api/credit_api.py | 1 | 10462 | # coding: utf-8
#
# Copyright 2010-2014 Ning, Inc.
# Copyright 2014-2020 Groupon, Inc
# Copyright 2020-2021 Equinix, Inc
# Copyright 2014-2021 The Billing Project, LLC
#
# The Billing Project, LLC licenses this file to you under the Apache License, version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Kill Bill
Kill Bill is an open-source billing and payments platform # noqa: E501
OpenAPI spec version: 0.22.22-SNAPSHOT
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from killbill.api_client import ApiClient
class CreditApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_credits(self, body=None, created_by=None, **kwargs): # noqa: E501
"""Create a credit # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_credits(body, created_by, async=True)
>>> result = thread.get()
:param async bool
:param List[InvoiceItem] body: (required)
:param Str created_by: (required)
:param Bool auto_commit:
:param List[Str] plugin_property:
:param Str reason:
:param Str comment:
:return: List[InvoiceItem]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_credits_with_http_info(body, created_by, **kwargs) # noqa: E501
else:
(data) = self.create_credits_with_http_info(body, created_by, **kwargs) # noqa: E501
return data
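    # Illustrative usage sketch (values are hypothetical; assumes an
    # ApiClient configured with the Kill Bill server URL and credentials):
    #
    #   api = CreditApi(api_client)
    #   items = [InvoiceItem(account_id='aa00aa00-0000-0000-0000-000000000000',
    #                        amount=10.0)]
    #   created = api.create_credits(body=items, created_by='admin')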
def create_credits_with_http_info(self, body=None, created_by=None, **kwargs): # noqa: E501
"""Create a credit # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_credits_with_http_info(body, created_by, async=True)
>>> result = thread.get()
:param async bool
:param List[InvoiceItem] body: (required)
:param Str created_by: (required)
:param Bool auto_commit:
:param List[Str] plugin_property:
:param Str reason:
:param Str comment:
:return: List[InvoiceItem]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'created_by', 'auto_commit', 'plugin_property', 'reason', 'comment'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_credits" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_credits`") # noqa: E501
# verify the required parameter 'created_by' is set
if ('created_by' not in params or
params['created_by'] is None):
raise ValueError("Missing the required parameter `created_by` when calling `create_credits`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'auto_commit' in params:
query_params.append(('autoCommit', params['auto_commit'])) # noqa: E501
if 'plugin_property' in params:
query_params.append(('pluginProperty', params['plugin_property'])) # noqa: E501
collection_formats['pluginProperty'] = 'multi' # noqa: E501
header_params = {}
if 'created_by' in params:
header_params['X-Killbill-CreatedBy'] = params['created_by'] # noqa: E501
if 'reason' in params:
header_params['X-Killbill-Reason'] = params['reason'] # noqa: E501
if 'comment' in params:
header_params['X-Killbill-Comment'] = params['comment'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Killbill Api Key', 'Killbill Api Secret', 'basicAuth'] # noqa: E501
return self.api_client.call_api(
'/1.0/kb/credits', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='List[InvoiceItem]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_credit(self, credit_id=None, **kwargs): # noqa: E501
"""Retrieve a credit by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_credit(credit_id, async=True)
>>> result = thread.get()
:param async bool
:param Str credit_id: (required)
:return: InvoiceItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_credit_with_http_info(credit_id, **kwargs) # noqa: E501
else:
(data) = self.get_credit_with_http_info(credit_id, **kwargs) # noqa: E501
return data
def get_credit_with_http_info(self, credit_id=None, **kwargs): # noqa: E501
"""Retrieve a credit by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_credit_with_http_info(credit_id, async=True)
>>> result = thread.get()
:param async bool
:param Str credit_id: (required)
:return: InvoiceItem
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['credit_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_credit" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'credit_id' is set
if ('credit_id' not in params or
params['credit_id'] is None):
raise ValueError("Missing the required parameter `credit_id` when calling `get_credit`") # noqa: E501
if 'credit_id' in params and not re.search('\\w+-\\w+-\\w+-\\w+-\\w+', params['credit_id']): # noqa: E501
raise ValueError("Invalid value for parameter `credit_id` when calling `get_credit`, must conform to the pattern `/\\w+-\\w+-\\w+-\\w+-\\w+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'credit_id' in params:
path_params['creditId'] = params['credit_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Killbill Api Key', 'Killbill Api Secret', 'basicAuth'] # noqa: E501
return self.api_client.call_api(
'/1.0/kb/credits/{creditId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InvoiceItem', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| apache-2.0 | 92,670,078,828,211,540 | 37.182482 | 169 | 0.589849 | false |
guewen/OpenUpgrade | addons/mass_mailing/models/mass_mailing_stats.py | 61 | 4455 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
class MailMailStats(osv.Model):
""" MailMailStats models the statistics collected about emails. Those statistics
are stored in a separated model and table to avoid bloating the mail_mail table
with statistics values. This also allows to delete emails send with mass mailing
without loosing the statistics about them. """
_name = 'mail.mail.statistics'
_description = 'Email Statistics'
_rec_name = 'message_id'
_order = 'message_id'
_columns = {
'mail_mail_id': fields.many2one('mail.mail', 'Mail ID', ondelete='set null'),
'message_id': fields.char('Message-ID'),
'model': fields.char('Document model'),
'res_id': fields.integer('Document ID'),
# campaign / wave data
'mass_mailing_id': fields.many2one(
'mail.mass_mailing', 'Mass Mailing',
ondelete='set null',
),
'mass_mailing_campaign_id': fields.related(
'mass_mailing_id', 'mass_mailing_campaign_id',
type='many2one', ondelete='set null',
relation='mail.mass_mailing.campaign',
string='Mass Mailing Campaign',
store=True, readonly=True,
),
# Bounce and tracking
'scheduled': fields.datetime('Scheduled', help='Date when the email has been created'),
'sent': fields.datetime('Sent', help='Date when the email has been sent'),
'exception': fields.datetime('Exception', help='Date of technical error leading to the email not being sent'),
'opened': fields.datetime('Opened', help='Date when the email has been opened the first time'),
'replied': fields.datetime('Replied', help='Date when this email has been replied for the first time.'),
'bounced': fields.datetime('Bounced', help='Date when this email has bounced.'),
}
_defaults = {
'scheduled': fields.datetime.now,
}
def _get_ids(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, domain=None, context=None):
if not ids and mail_mail_ids:
base_domain = [('mail_mail_id', 'in', mail_mail_ids)]
elif not ids and mail_message_ids:
base_domain = [('message_id', 'in', mail_message_ids)]
else:
base_domain = [('id', 'in', ids or [])]
if domain:
base_domain = ['&'] + domain + base_domain
return self.search(cr, uid, base_domain, context=context)
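    # Illustrative domain built by _get_ids: with ids=[1, 2] and
    # domain=[('opened', '=', False)], the search domain is the prefix-AND
    # form ['&', ('opened', '=', False), ('id', 'in', [1, 2])].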
def set_opened(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None):
stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids, [('opened', '=', False)], context)
self.write(cr, uid, stat_ids, {'opened': fields.datetime.now()}, context=context)
return stat_ids
def set_replied(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None):
stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids, [('replied', '=', False)], context)
self.write(cr, uid, stat_ids, {'replied': fields.datetime.now()}, context=context)
return stat_ids
def set_bounced(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None):
stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids, [('bounced', '=', False)], context)
self.write(cr, uid, stat_ids, {'bounced': fields.datetime.now()}, context=context)
return stat_ids
| agpl-3.0 | 2,950,543,937,023,360,000 | 49.05618 | 118 | 0.619978 | false |
JeffRoy/mi-dataset | mi/dataset/driver/wc_sbe/cspp/wc_sbe_cspp_recovered_driver.py | 1 | 2044 | #!/usr/bin/env python
"""
@package mi.dataset.driver.wc_sbe.cspp
@file mi/dataset/driver/wc_sbe/cspp/wc_sbe_cspp_recovered_driver.py
@author Jeff Roy
@brief Driver for the wc_sbe_cspp instrument
Release notes:
Initial Release
"""
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.parser.wc_sbe_cspp import \
WcSbeCsppParser, \
WcSbeEngRecoveredDataParticle, \
WcSbeMetadataRecoveredDataParticle
from mi.dataset.parser.cspp_base import \
METADATA_PARTICLE_CLASS_KEY, \
DATA_PARTICLE_CLASS_KEY
from mi.core.versioning import version
@version("15.6.1")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
"""
This is the method called by Uframe
    :param basePythonCodePath: This is the file system location of mi-dataset
    :param sourceFilePath: This is the full path and filename of the file to be parsed
    :param particleDataHdlrObj: Java Object to consume the output of the parser
:return particleDataHdlrObj
"""
with open(sourceFilePath, 'rU') as stream_handle:
# create and instance of the concrete driver class defined below
driver = WcSbeCsppRecoveredDriver(basePythonCodePath, stream_handle, particleDataHdlrObj)
driver.processFileStream()
return particleDataHdlrObj
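# Illustrative call (hypothetical paths):
#   parse('/opt/mi-dataset', '/data/wc_sbe_recovered.txt', particle_data_hdlr)
# opens the source file, streams it through WcSbeCsppParser via the driver,
# and returns the same particle data handler object it was given.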
class WcSbeCsppRecoveredDriver(SimpleDatasetDriver):
"""
Derived wc_sbe_cspp driver class
All this needs to do is create a concrete _build_parser method
"""
def _build_parser(self, stream_handle):
parser_config = {
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
METADATA_PARTICLE_CLASS_KEY: WcSbeMetadataRecoveredDataParticle,
DATA_PARTICLE_CLASS_KEY: WcSbeEngRecoveredDataParticle
}
}
parser = WcSbeCsppParser(parser_config, stream_handle,
self._exception_callback)
return parser
| bsd-2-clause | 4,576,780,628,206,238,000 | 29.969697 | 97 | 0.718689 | false |
m11s/MissionPlanner | Lib/rlcompleter.py | 61 | 6036 | """Word completion for GNU readline 2.0.
This requires the latest extension to the readline module. The completer
completes keywords, built-ins and globals in a selectable namespace (which
defaults to __main__); when completing NAME.NAME..., it evaluates (!) the
expression up to the last dot and completes its attributes.
It's very cool to do "import sys", type "sys.", hit the
completion key (twice), and see the list of names defined by the
sys module!
Tip: to use the tab key as the completion key, call
readline.parse_and_bind("tab: complete")
Notes:
- Exceptions raised by the completer function are *ignored* (and
generally cause the completion to fail). This is a feature -- since
readline sets the tty device in raw (or cbreak) mode, printing a
traceback wouldn't work well without some complicated hoopla to save,
reset and restore the tty state.
- The evaluation of the NAME.NAME... form may cause arbitrary
application defined code to be executed if an object with a
__getattr__ hook is found. Since it is the responsibility of the
application (or the user) to enable this feature, I consider this an
acceptable risk. More complicated expressions (e.g. function calls or
indexing operations) are *not* evaluated.
- GNU readline is also used by the built-in functions input() and
raw_input(), and thus these also benefit/suffer from the completer
features. Clearly an interactive application can benefit by
specifying its own completer function and using raw_input() for all
its input.
- When the original stdin is not a tty device, GNU readline is never
used, and this module (and the readline module) are silently inactive.
"""
import __builtin__
import __main__
__all__ = ["Completer"]
class Completer:
def __init__(self, namespace = None):
"""Create a new completer for the command line.
Completer([namespace]) -> completer instance.
If unspecified, the default namespace where completions are performed
is __main__ (technically, __main__.__dict__). Namespaces should be
given as dictionaries.
Completer instances should be used as the completion mechanism of
readline via the set_completer() call:
readline.set_completer(Completer(my_namespace).complete)
"""
if namespace and not isinstance(namespace, dict):
raise TypeError,'namespace must be a dictionary'
# Don't bind to namespace quite yet, but flag whether the user wants a
# specific namespace or to use __main__.__dict__. This will allow us
# to bind to __main__.__dict__ at completion time, not now.
if namespace is None:
self.use_main_ns = 1
else:
self.use_main_ns = 0
self.namespace = namespace
def complete(self, text, state):
"""Return the next possible completion for 'text'.
This is called successively with state == 0, 1, 2, ... until it
returns None. The completion should begin with 'text'.
"""
if self.use_main_ns:
self.namespace = __main__.__dict__
if state == 0:
if "." in text:
self.matches = self.attr_matches(text)
else:
self.matches = self.global_matches(text)
try:
return self.matches[state]
except IndexError:
return None
def _callable_postfix(self, val, word):
if hasattr(val, '__call__'):
word = word + "("
return word
def global_matches(self, text):
"""Compute matches when text is a simple name.
Return a list of all keywords, built-in functions and names currently
defined in self.namespace that match.
"""
import keyword
matches = []
n = len(text)
for word in keyword.kwlist:
if word[:n] == text:
matches.append(word)
for nspace in [__builtin__.__dict__, self.namespace]:
for word, val in nspace.items():
if word[:n] == text and word != "__builtins__":
matches.append(self._callable_postfix(val, word))
return matches
def attr_matches(self, text):
"""Compute matches when text contains a dot.
Assuming the text is of the form NAME.NAME....[NAME], and is
evaluatable in self.namespace, it will be evaluated and its attributes
(as revealed by dir()) are used as possible completions. (For class
instances, class members are also considered.)
WARNING: this can still invoke arbitrary C code, if an object
with a __getattr__ hook is evaluated.
"""
import re
m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text)
if not m:
return []
expr, attr = m.group(1, 3)
try:
thisobject = eval(expr, self.namespace)
except Exception:
return []
# get the content of the object, except __builtins__
words = dir(thisobject)
if "__builtins__" in words:
words.remove("__builtins__")
if hasattr(thisobject, '__class__'):
words.append('__class__')
words.extend(get_class_members(thisobject.__class__))
matches = []
n = len(attr)
for word in words:
if word[:n] == attr and hasattr(thisobject, word):
val = getattr(thisobject, word)
word = self._callable_postfix(val, "%s.%s" % (expr, word))
matches.append(word)
return matches
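    # Illustrative example: after "import sys", attr_matches("sys.pa")
    # evaluates the expression "sys" in the namespace and returns matches
    # such as "sys.path"; callable attributes get a trailing "(" appended.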
def get_class_members(klass):
ret = dir(klass)
if hasattr(klass,'__bases__'):
for base in klass.__bases__:
ret = ret + get_class_members(base)
return ret
try:
import readline
except ImportError:
pass
else:
readline.set_completer(Completer().complete)
| gpl-3.0 | 1,220,361,793,271,662,600 | 33.505882 | 78 | 0.604374 | false |
caperren/Archives | OSU Robotics Club/Mars Rover 2017-2018/software/ros_packages/ground_station/src/Framework/MapSystems/RoverMapHelper.py | 1 | 1189 | import PIL.Image
import math
class MapHelper(object):
@staticmethod
def new_image(width, height, alpha=False):
"""
Generates a new image using PIL.Image module
returns PIL.IMAGE OBJECT
"""
if alpha is True:
return PIL.Image.new('RGBA', (width, height), (0, 0, 0, 0))
else:
return PIL.Image.new('RGBA', (width, height))
@staticmethod
def fast_round(value, precision):
"""
        Function to truncate a value to the given decimal precision,
        instead of using Python's built-in round()
        returns FLOAT
"""
return int(value * 10 ** precision) / 10. ** precision
@staticmethod
def pixels_to_degrees(pixels, zoom):
"""
        Scales a pixel count at the given zoom level to the equivalent
        count at maximum zoom (21)
returns INT
"""
return pixels * 2 ** (21-zoom)
@staticmethod
def pixels_to_meters(latitude, zoom):
"""
        Function computes how many pixels per meter the Web Mercator
        projection gives at the given latitude and zoom
returns FLOAT
"""
# https://groups.google.com/forum/#!topic/google-maps-js-api-v3/hDRO4oHVSeM
return 2 ** zoom / (156543.03392 * math.cos(math.radians(latitude)))
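    # Illustrative check: MapHelper.pixels_to_meters(45.0, 19)
    # = 2**19 / (156543.03392 * cos(radians(45.0))), which is roughly 4.74
    # pixels per meter.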
| gpl-3.0 | 6,997,406,587,076,255,000 | 24.847826 | 83 | 0.579479 | false |
ctmarinas/stgit | stgit/lib/git/objects.py | 1 | 10634 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import re
from stgit.compat import text
from stgit.config import config
from .base import Immutable
from .person import Person
class GitObject(Immutable):
"""Base class for all git objects. One git object is represented by at
most one C{GitObject}, which makes it possible to compare them
using normal Python object comparison; it also ensures we don't
waste more memory than necessary."""
class BlobData(Immutable):
"""Represents the data contents of a git blob object."""
def __init__(self, data):
assert isinstance(data, bytes)
self.bytes = data
def commit(self, repository):
"""Commit the blob.
@return: The committed blob
@rtype: L{Blob}"""
sha1 = (
repository.run(['git', 'hash-object', '-w', '--stdin'])
.encoding(None)
.raw_input(self.bytes)
.output_one_line()
)
return repository.get_blob(sha1)
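    # Illustrative usage: BlobData(b'hello\n').commit(repository) pipes the
    # bytes through `git hash-object -w --stdin` and returns the Blob with
    # sha1 ce013625030ba8dba906f756967f9e9ca394464a.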
class Blob(GitObject):
"""Represents a git blob object. All the actual data contents of the
blob object is stored in the L{data} member, which is a
L{BlobData} object."""
typename = 'blob'
default_perm = '100644'
def __init__(self, repository, sha1):
self._repository = repository
self.sha1 = sha1
def __repr__(self): # pragma: no cover
return 'Blob<%s>' % self.sha1
@property
def data(self):
type_, content = self._repository.cat_object(self.sha1)
assert type_ == 'blob', 'expected "blob", got "%s" for %s' % (type_, self.sha1)
return BlobData(content)
class TreeData(Immutable):
"""Represents the data contents of a git tree object."""
def __init__(self, entries):
"""Create a new L{TreeData} object from the given mapping from names
(strings) to either (I{permission}, I{object}) tuples or just
objects."""
self._entries = {}
for name, po in entries.items():
assert '/' not in name, 'tree entry name contains slash: %s' % name
if isinstance(po, GitObject):
perm, obj = po.default_perm, po
else:
perm, obj = po
self._entries[name] = (perm, obj)
def __getitem__(self, key):
return self._entries[key]
def __iter__(self):
for name, (perm, obj) in self._entries.items():
yield name, (perm, obj)
def commit(self, repository):
"""Commit the tree.
@return: The committed tree
@rtype: L{Tree}"""
listing = [
'%s %s %s\t%s' % (perm, obj.typename, obj.sha1, name)
for name, (perm, obj) in self
]
sha1 = (
repository.run(['git', 'mktree', '-z'])
.input_nulterm(listing)
.output_one_line()
)
return repository.get_tree(sha1)
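    # Illustrative `git mktree -z` input entry built above (a tab character
    # separates the name field):
    #   "100644 blob ce013625030ba8dba906f756967f9e9ca394464a\thello.txt"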
@classmethod
def parse(cls, repository, lines):
"""Parse a raw git tree description.
@return: A new L{TreeData} object
@rtype: L{TreeData}"""
entries = {}
for line in lines:
m = re.match(r'^([0-7]{6}) ([a-z]+) ([0-9a-f]{40})\t(.*)$', line)
perm, type, sha1, name = m.groups()
entries[name] = (perm, repository.get_object(type, sha1))
return cls(entries)
class Tree(GitObject):
"""Represents a git tree object. All the actual data contents of the
tree object is stored in the L{data} member, which is a
L{TreeData} object."""
typename = 'tree'
default_perm = '040000'
def __init__(self, repository, sha1):
self.sha1 = sha1
self._repository = repository
self._data = None
@property
def data(self):
if self._data is None:
self._data = TreeData.parse(
self._repository,
self._repository.run(['git', 'ls-tree', '-z', self.sha1]).output_lines(
'\0'
),
)
return self._data
def __repr__(self): # pragma: no cover
return 'Tree<sha1: %s>' % self.sha1
class CommitData(Immutable):
"""Represents the data contents of a git commit object."""
def __init__(
self, tree, parents, message, encoding=None, author=None, committer=None
):
self.tree = tree
self.parents = parents
self.encoding = (
encoding if encoding is not None else config.get('i18n.commitencoding')
)
if isinstance(message, bytes):
self.message = message
else:
self.message = message.encode(self.encoding)
if author is None:
self._author = Person.author()
else:
assert isinstance(author, (Person, bytes))
self._author = author
if committer is None:
self._committer = Person.committer()
else:
assert isinstance(committer, (Person, bytes))
self._committer = committer
@property
def env(self):
env = {}
for p, v1 in [(self.author, 'AUTHOR'), (self.committer, 'COMMITTER')]:
if p is not None:
for attr, v2 in [
('name', 'NAME'),
('email', 'EMAIL'),
('date', 'DATE'),
]:
if getattr(p, attr) is not None:
env['GIT_%s_%s' % (v1, v2)] = text(getattr(p, attr))
return env
@property
def message_str(self):
return self.message.decode(self.encoding)
@property
def parent(self):
assert len(self.parents) == 1
return self.parents[0]
@property
def author(self):
if isinstance(self._author, bytes):
self._author = Person.parse(self._author.decode(self.encoding))
return self._author
@property
def committer(self):
if isinstance(self._committer, bytes):
self._committer = Person.parse(self._committer.decode(self.encoding))
return self._committer
def set_tree(self, tree):
return self._replace(tree=tree)
def set_parent(self, parent):
return self._replace(parents=[parent])
def set_author(self, author):
assert isinstance(author, Person) or author is None
return self._replace(author=author)
def set_committer(self, committer):
assert isinstance(committer, Person) or committer is None
return self._replace(committer=committer)
def set_message(self, message):
commit_encoding = config.get('i18n.commitencoding')
if isinstance(message, bytes):
            message.decode(commit_encoding)  # validation only; raises if not decodable
else:
message = message.encode(commit_encoding)
return self._replace(message=message, encoding=commit_encoding)
def _replace(self, **kws):
return type(self)(
tree=kws.get('tree', self.tree),
parents=kws.get('parents', self.parents),
message=kws.get('message', self.message),
encoding=kws.get('encoding', self.encoding),
author=kws.get('author', self.author),
committer=kws.get('committer', self.committer),
)
def is_nochange(self):
return len(self.parents) == 1 and self.tree == self.parent.data.tree
def __repr__(self): # pragma: no cover
return (
'CommitData<tree: %s, parents: %s, author: %s, committer: %s, '
'message: %s>'
) % (
self.tree.sha1,
[p.sha1 for p in self.parents],
self._author,
self._committer,
self.message.split(b'\n', 1)[0],
)
def commit(self, repository):
"""Commit the commit.
@return: The committed commit
@rtype: L{Commit}"""
c = ['git', 'commit-tree', self.tree.sha1]
for p in self.parents:
c.append('-p')
c.append(p.sha1)
sha1 = (
repository.run(c, env=self.env)
.encoding(None)
.raw_input(self.message)
.output_one_line()
)
return repository.get_commit(sha1)
@classmethod
def parse(cls, repository, content):
"""Parse a raw git commit description.
@return: A new L{CommitData} object
@rtype: L{CommitData}"""
required_keys = set(['tree', 'author', 'committer'])
parents = []
encoding = None
while True:
line, content = content.split(b'\n', 1)
if line:
while content.startswith(b' '):
extended, content = content.split(b'\n', 1)
line += extended[1:]
key_b, value_b = line.split(b' ', 1)
key = key_b.decode('utf-8')
if key == 'tree':
tree = repository.get_tree(value_b.decode('utf-8'))
required_keys.remove(key)
elif key == 'parent':
parents.append(repository.get_commit(value_b.decode('utf-8')))
elif key == 'author':
author = value_b
required_keys.remove(key)
elif key == 'committer':
committer = value_b
required_keys.remove(key)
elif key == 'encoding':
encoding = value_b.decode('utf-8')
else:
# Any other keys are meant to be explicitly ignored
pass
else:
break
assert not required_keys, 'commit data missing keys %s' % required_keys
return cls(tree, parents, content, encoding, author, committer)
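    # Illustrative raw commit object accepted by parse() (headers, a blank
    # line, then the message; hashes and identities are hypothetical):
    #   tree 9daeafb9864cf43055ae93beb0afd6c7d144bfa4
    #   parent ce013625030ba8dba906f756967f9e9ca394464a
    #   author A U Thor <author@example.com> 1112911993 +0200
    #   committer C O Mitter <committer@example.com> 1112911993 +0200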
class Commit(GitObject):
"""Represents a git commit object. All the actual data contents of the
commit object is stored in the L{data} member, which is a
L{CommitData} object."""
typename = 'commit'
def __init__(self, repository, sha1):
self.sha1 = sha1
self._repository = repository
self._data = None
@property
def data(self):
if self._data is None:
type_, content = self._repository.cat_object(self.sha1)
assert type_ == 'commit', 'expected "commit", got "%s" for %s' % (
type_,
self.sha1,
)
self._data = CommitData.parse(self._repository, content)
return self._data
def __repr__(self): # pragma: no cover
return 'Commit<sha1: %s, data: %s>' % (self.sha1, self._data)
| gpl-2.0 | -1,564,125,830,605,245,400 | 31.03012 | 87 | 0.544574 | false |