repo_name (string, 6-100 chars) | path (string, 4-294 chars) | copies (string, 1-5 chars) | size (string, 4-6 chars) | content (string, 606-896k chars) | license (15 classes)
---|---|---|---|---|---
slz/delidded-kernel-n900t-note3 | tools/perf/scripts/python/sched-migration.py | 11215 | 11670 | (content follows) | gpl-2.0
#!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <[email protected]>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
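#
# A possible workflow for collecting input for this script (illustrative only,
# not part of the original header; the event list and duration are examples):
#   perf record -e sched:sched_switch -e sched:sched_wakeup -e sched:sched_migrate_task -a -- sleep 5
#   perf script -s sched-migration.py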
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
threads = { 0 : "idle"}
def thread_name(pid):
return "%s:%d" % (threads[pid], pid)
class RunqueueEventUnknown:
@staticmethod
def color():
return None
def __repr__(self):
return "unknown"
class RunqueueEventSleep:
@staticmethod
def color():
return (0, 0, 0xff)
def __init__(self, sleeper):
self.sleeper = sleeper
def __repr__(self):
return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
@staticmethod
def color():
return (0xff, 0xff, 0)
def __init__(self, wakee):
self.wakee = wakee
def __repr__(self):
return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
@staticmethod
def color():
return (0, 0xff, 0)
def __init__(self, child):
self.child = child
def __repr__(self):
return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
@staticmethod
def color():
return (0, 0xf0, 0xff)
def __init__(self, new):
self.new = new
def __repr__(self):
return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
@staticmethod
def color():
return (0xff, 0, 0xff)
def __init__(self, old):
self.old = old
def __repr__(self):
return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
self.tasks = tuple(tasks)
self.event = event
def sched_switch(self, prev, prev_state, next):
event = RunqueueEventUnknown()
if taskState(prev_state) == "R" and next in self.tasks \
and prev in self.tasks:
return self
if taskState(prev_state) != "R":
event = RunqueueEventSleep(prev)
next_tasks = list(self.tasks[:])
if prev in self.tasks:
if taskState(prev_state) != "R":
next_tasks.remove(prev)
elif taskState(prev_state) == "R":
next_tasks.append(prev)
if next not in next_tasks:
next_tasks.append(next)
return RunqueueSnapshot(next_tasks, event)
def migrate_out(self, old):
if old not in self.tasks:
return self
next_tasks = [task for task in self.tasks if task != old]
return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
def __migrate_in(self, new, event):
if new in self.tasks:
self.event = event
return self
next_tasks = self.tasks[:] + tuple([new])
return RunqueueSnapshot(next_tasks, event)
def migrate_in(self, new):
return self.__migrate_in(new, RunqueueMigrateIn(new))
def wake_up(self, new):
return self.__migrate_in(new, RunqueueEventWakeup(new))
def wake_up_new(self, new):
return self.__migrate_in(new, RunqueueEventFork(new))
def load(self):
""" Provide the number of tasks on the runqueue.
Don't count idle"""
return len(self.tasks) - 1
def __repr__(self):
ret = self.tasks.__repr__()
ret += self.origin_tostring()
return ret
class TimeSlice:
def __init__(self, start, prev):
self.start = start
self.prev = prev
self.end = start
# cpus that triggered the event
self.event_cpus = []
if prev is not None:
self.total_load = prev.total_load
self.rqs = prev.rqs.copy()
else:
self.rqs = defaultdict(RunqueueSnapshot)
self.total_load = 0
def __update_total_load(self, old_rq, new_rq):
diff = new_rq.load() - old_rq.load()
self.total_load += diff
def sched_switch(self, ts_list, prev, prev_state, next, cpu):
old_rq = self.prev.rqs[cpu]
new_rq = old_rq.sched_switch(prev, prev_state, next)
if old_rq is new_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def migrate(self, ts_list, new, old_cpu, new_cpu):
if old_cpu == new_cpu:
return
old_rq = self.prev.rqs[old_cpu]
out_rq = old_rq.migrate_out(new)
self.rqs[old_cpu] = out_rq
self.__update_total_load(old_rq, out_rq)
new_rq = self.prev.rqs[new_cpu]
in_rq = new_rq.migrate_in(new)
self.rqs[new_cpu] = in_rq
self.__update_total_load(new_rq, in_rq)
ts_list.append(self)
if old_rq is not out_rq:
self.event_cpus.append(old_cpu)
self.event_cpus.append(new_cpu)
def wake_up(self, ts_list, pid, cpu, fork):
old_rq = self.prev.rqs[cpu]
if fork:
new_rq = old_rq.wake_up_new(pid)
else:
new_rq = old_rq.wake_up(pid)
if new_rq is old_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def next(self, t):
self.end = t
return TimeSlice(t, self)
class TimeSliceList(UserList):
def __init__(self, arg = []):
self.data = arg
def get_time_slice(self, ts):
if len(self.data) == 0:
slice = TimeSlice(ts, TimeSlice(-1, None))
else:
slice = self.data[-1].next(ts)
return slice
def find_time_slice(self, ts):
start = 0
end = len(self.data)
found = -1
searching = True
while searching:
if start == end or start == end - 1:
searching = False
i = (end + start) / 2
if self.data[i].start <= ts and self.data[i].end >= ts:
found = i
end = i
continue
if self.data[i].end < ts:
start = i
elif self.data[i].start > ts:
end = i
return found
def set_root_win(self, win):
self.root_win = win
def mouse_down(self, cpu, t):
idx = self.find_time_slice(t)
if idx == -1:
return
ts = self[idx]
rq = ts.rqs[cpu]
raw = "CPU: %d\n" % cpu
raw += "Last event : %s\n" % rq.event.__repr__()
raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
raw += "Load = %d\n" % rq.load()
for t in rq.tasks:
raw += "%s \n" % thread_name(t)
self.root_win.update_summary(raw)
def update_rectangle_cpu(self, slice, cpu):
rq = slice.rqs[cpu]
if slice.total_load != 0:
load_rate = rq.load() / float(slice.total_load)
else:
load_rate = 0
red_power = int(0xff - (0xff * load_rate))
color = (0xff, red_power, red_power)
top_color = None
if cpu in slice.event_cpus:
top_color = rq.event.color()
self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
def fill_zone(self, start, end):
i = self.find_time_slice(start)
if i == -1:
return
for i in xrange(i, len(self.data)):
timeslice = self.data[i]
if timeslice.start > end:
return
for cpu in timeslice.rqs:
self.update_rectangle_cpu(timeslice, cpu)
def interval(self):
if len(self.data) == 0:
return (0, 0)
return (self.data[0].start, self.data[-1].end)
def nr_rectangles(self):
last_ts = self.data[-1]
max_cpu = 0
for cpu in last_ts.rqs:
if cpu > max_cpu:
max_cpu = cpu
return max_cpu
class SchedEventProxy:
def __init__(self):
self.current_tsk = defaultdict(lambda : -1)
self.timeslices = TimeSliceList()
def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
""" Ensure the task we sched out this cpu is really the one
we logged. Otherwise we may have missed traces """
on_cpu_task = self.current_tsk[headers.cpu]
if on_cpu_task != -1 and on_cpu_task != prev_pid:
print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
threads[prev_pid] = prev_comm
threads[next_pid] = next_comm
self.current_tsk[headers.cpu] = next_pid
ts = self.timeslices.get_time_slice(headers.ts())
ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
ts = self.timeslices.get_time_slice(headers.ts())
ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
def wake_up(self, headers, comm, pid, success, target_cpu, fork):
if success == 0:
return
ts = self.timeslices.get_time_slice(headers.ts())
ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
global parser
parser = SchedEventProxy()
def trace_end():
app = wx.App(False)
timeslices = parser.timeslices
frame = RootFrame(timeslices, "Migration")
app.MainLoop()
def sched__sched_stat_runtime(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, runtime, vruntime):
pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_process_fork(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
parent_comm, parent_pid, child_comm, child_pid):
pass
def sched__sched_process_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_free(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_migrate_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, orig_cpu,
dest_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio)
def sched__sched_wakeup_new(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 1)
def sched__sched_wakeup(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 0)
def sched__sched_wait_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
ret):
pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid):
pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
pass
gangadhar-kadam/prjapp | hr/doctype/leave_control_panel/leave_control_panel.py | 30 | 2230 | (content follows) | agpl-3.0
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint, cstr, flt, nowdate
from webnotes.model.doc import Document
from webnotes.model.code import get_obj
from webnotes import msgprint
class DocType:
def __init__(self, doc, doclist):
self.doc = doc
self.doclist = doclist
# Get Employees
# **********************************************************************
def get_employees(self):
lst1 = [[self.doc.employee_type,"employment_type"],[self.doc.branch,"branch"],[self.doc.designation,"designation"],[self.doc.department, "department"],[self.doc.grade,"grade"]]
condition = "where "
flag = 0
for l in lst1:
if(l[0]):
if flag == 0:
condition += l[1] + "= '" + l[0] +"'"
else:
condition += " and " + l[1]+ "= '" +l[0] +"'"
flag = 1
emp_query = "select name from `tabEmployee` "
if flag == 1:
emp_query += condition
e = webnotes.conn.sql(emp_query)
return e
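# Illustration (hypothetical filter values, not from the original source): with
# employee_type = "Full-time" and branch = "HQ" set on the doc, get_employees()
# runs: select name from `tabEmployee` where employment_type= 'Full-time' and branch= 'HQ'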
# ----------------
# validate values
# ----------------
def validate_values(self):
val_dict = {self.doc.fiscal_year:'Fiscal Year', self.doc.leave_type:'Leave Type', self.doc.no_of_days:'New Leaves Allocated'}
for d in val_dict:
if not d:
msgprint("Please enter : "+val_dict[d])
raise Exception
# Allocation
# **********************************************************************
def allocate_leave(self):
self.validate_values()
for d in self.get_employees():
la = Document('Leave Allocation')
la.employee = cstr(d[0])
la.employee_name = webnotes.conn.get_value('Employee',cstr(d[0]),'employee_name')
la.leave_type = self.doc.leave_type
la.fiscal_year = self.doc.fiscal_year
la.posting_date = nowdate()
la.carry_forward = cint(self.doc.carry_forward)
la.new_leaves_allocated = flt(self.doc.no_of_days)
la_obj = get_obj(doc=la)
la_obj.doc.docstatus = 1
la_obj.validate()
la_obj.on_update()
la_obj.doc.save(1)
msgprint("Leaves Allocated Successfully")
tchakravarty/PyMurphy | Archive/pmtk3-master/python/utils.py | 7 | 4579 | (content follows) | mit
#!/usr/bin/env python
import os
import scipy.io as sio
import numpy as np
import glob
PYTHON_DIR = os.path.dirname(os.path.realpath(__file__))
DATA_DIR = os.path.join(os.path.dirname(PYTHON_DIR), 'pmtkdataCopy')
def add_ones(X):
"""Add a column of ones to X"""
n = len(X)
return np.column_stack((np.ones(n), X))
def degexpand(X, deg, add_ones=False):
"""Expand input vectors to contain powers of the input features"""
n = len(X)
xx = X
    for i in range(1, deg):
xx = np.column_stack((xx, np.power(X, i + 1)))
if add_ones:
xx = np.column_stack((np.ones(n), xx))
return xx
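# Worked example (illustrative, not part of the original module):
# degexpand(np.array([1., 2., 3.]), 2) stacks the columns [x, x**2] and
# returns array([[1., 1.], [2., 4.], [3., 9.]]).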
def rescale_data(X, min_val=-1, max_val=1, minx=None, rangex=None):
"""
Rescale columns to lie in the range
[min_val, max_val] (defaults to [-1,1]])
"""
if minx is None:
minx = X.min(axis=0)
if rangex is None:
rangex = X.max(axis=0) - X.min(axis=0)
return (max_val - min_val) * (X - minx) / rangex + min_val
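# Worked example (illustrative): rescale_data(np.array([[0.], [5.], [10.]]))
# maps the column linearly onto [-1, 1], giving array([[-1.], [0.], [1.]]).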
def center_cols(X, mu=None):
"""
Make each column be zero mean
"""
if mu is None:
mu = X.mean(axis=0)
return X - mu, mu
def mk_unit_variance(X, s=None):
"""
Make each column of X be variance 1
"""
if s is None:
s = X.std(axis=0)
try:
len(s)
s[s < np.spacing(1)] = 1
except TypeError:
s = s if s > np.spacing(1) else 1
return X / s, s
class preprocessor_create():
def __init__(self, standardize_X=False, rescale_X=False, kernel_fn=None,
poly=None, add_ones=False):
self.standardize_X = standardize_X
self.rescale_X = rescale_X
self.kernel_fn = kernel_fn
self.poly = poly
self.add_ones = add_ones
def poly_data_make(sampling="sparse", deg=3, n=21):
"""
Create an artificial dataset
"""
np.random.seed(0)
if sampling == "irregular":
xtrain = np.concatenate(
(np.arange(-1, -0.5, 0.1), np.arange(3, 3.5, 0.1)))
elif sampling == "sparse":
xtrain = np.array([-3, -2, 0, 2, 3])
elif sampling == "dense":
xtrain = np.arange(-5, 5, 0.6)
elif sampling == "thibaux":
xtrain = np.linspace(0, 20, n)
xtest = np.arange(0, 20, 0.1)
sigma2 = 4
w = np.array([-1.5, 1/9.])
fun = lambda x: w[0]*x + w[1]*np.square(x)
if sampling != "thibaux":
assert deg < 4, "bad degree, dude %d" % deg
xtest = np.arange(-7, 7, 0.1)
if deg == 2:
fun = lambda x: (10 + x + np.square(x))
else:
fun = lambda x: (10 + x + np.power(x, 3))
sigma2 = np.square(5)
ytrain = fun(xtrain) + np.random.normal(0, 1, xtrain.shape) * \
np.sqrt(sigma2)
ytestNoisefree = fun(xtest)
ytestNoisy = ytestNoisefree + np.random.normal(0, 1, xtest.shape) * \
np.sqrt(sigma2)
return xtrain, ytrain, xtest, ytestNoisefree, ytestNoisy, sigma2
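# Usage sketch (illustrative):
#   xtrain, ytrain, xtest, ytest_clean, ytest_noisy, sigma2 = poly_data_make(sampling="thibaux", n=21)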
def load_mat(matName):
"""look for the .mat file in pmtk3/pmtkdataCopy/
currently only supports .mat files created by Matlab 5, 6, and 7 through 7.2.
"""
try:
data = sio.loadmat(os.path.join(DATA_DIR, matName))
except NotImplementedError:
raise
except FileNotFoundError:
raise
return data
def generate_rst():
"""generate chX.rst in current working directory"""
cwd = os.getcwd()
demo_dir = os.path.join(cwd, 'demos')
chapters = os.listdir(demo_dir)
for chapter in chapters:
if not os.path.isdir(os.path.join(demo_dir, chapter)):
continue
reg_py = os.path.join(demo_dir, chapter, '*.py')
scripts = glob.glob(reg_py)
rst_file = chapter + '.rst'
rst_file = os.path.join(demo_dir, chapter, rst_file)
with open(rst_file, 'w') as f:
f.write(chapter)
f.write('\n========================================\n')
for script in scripts:
script_name = os.path.basename(script)
f.write('\n' + script_name[:-3])
f.write('\n----------------------------------------\n')
reg_png = os.path.join(demo_dir,
chapter,
script_name[:-3] + '*.png')
for img in glob.glob(reg_png):
img_name = os.path.basename(img)
f.write(".. image:: " + img_name + "\n")
f.write(".. literalinclude:: " + script_name + "\n")
if __name__ == '__main__':
generate_rst()
print("Finished generate chX.rst!")
Teamxrtc/webrtc-streaming-node | third_party/depot_tools/external_bin/gsutil/gsutil_4.15/gsutil/third_party/httplib2/python3/httplib2/__init__.py | 29 | 56301 | (content follows)
"""
httplib2
A caching http interface that supports ETags and gzip
to conserve bandwidth.
Requires Python 3.0 or later
Changelog:
2009-05-28, Pilgrim: ported to Python 3
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
"""
__author__ = "Joe Gregorio ([email protected])"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Thomas Broyer ([email protected])",
"James Antill",
"Xavier Verges Farrero",
"Jonathan Feinberg",
"Blair Zajac",
"Sam Ruby",
"Louis Nyffenegger",
"Mark Pilgrim"]
__license__ = "MIT"
__version__ = "0.7.7"
import re
import sys
import email
import email.utils
import email.message
import email.feedparser
import io
import gzip
import zlib
import http.client
import urllib.parse
import base64
import os
import copy
import calendar
import time
import random
import errno
from hashlib import sha1 as _sha, md5 as _md5
import hmac
from gettext import gettext as _
import socket
import ssl
_ssl_wrap_socket = ssl.wrap_socket
try:
import socks
except ImportError:
socks = None
from .iri2uri import iri2uri
def has_timeout(timeout):
if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
return (timeout is not None)
__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
'RedirectMissingLocation', 'RedirectLimit',
'FailedToDecompressContent', 'UnimplementedDigestAuthOptionError',
'UnimplementedHmacDigestAuthOptionError',
'debuglevel', 'RETRIES']
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
RETRIES = 2
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass
# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
def __init__(self, desc, response, content):
self.response = response
self.content = content
HttpLib2Error.__init__(self, desc)
class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
class RedirectLimit(HttpLib2ErrorWithResponse): pass
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class MalformedHeader(HttpLib2Error): pass
class RelativeURIError(HttpLib2Error): pass
class ServerNotFoundError(HttpLib2Error): pass
class CertificateValidationUnsupportedInPython31(HttpLib2Error): pass
# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
# Pluggable cache storage (supports storing the cache in
# flat files by default. We need a plug-in architecture
# that can support Berkeley DB and Squid)
# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5
# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
# Default CA certificates file bundled with httplib2.
CA_CERTS = os.path.join(
os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
return [header for header in list(response.keys()) if header not in hopbyhop]
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986.
(scheme, authority, path, query, fragment) = parse_uri(uri)
"""
groups = URI.match(uri).groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8])
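# Example (illustrative): parse_uri("http://example.org/path?q=1#frag")
# returns ('http', 'example.org', '/path', 'q=1', 'frag').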
def urlnorm(uri):
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
authority = authority.lower()
scheme = scheme.lower()
if not path:
path = "/"
# Could do syntax based normalization of the URI before
# computing the digest. See Section 6.2.2 of Std 66.
request_uri = query and "?".join([path, query]) or path
scheme = scheme.lower()
defrag_uri = scheme + "://" + authority + request_uri
return scheme, authority, request_uri, defrag_uri
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(br'^\w+://')
re_url_scheme_s = re.compile(r'^\w+://')
re_slash = re.compile(br'[?/:|]+')
def safename(filename):
"""Return a filename suitable for the cache.
Strips dangerous and common characters to create a filename we
can use to store the cache in.
"""
try:
if re_url_scheme_s.match(filename):
if isinstance(filename,bytes):
filename = filename.decode('utf-8')
filename = filename.encode('idna')
else:
filename = filename.encode('idna')
except UnicodeError:
pass
if isinstance(filename,str):
filename=filename.encode('utf-8')
filemd5 = _md5(filename).hexdigest().encode('utf-8')
filename = re_url_scheme.sub(b"", filename)
filename = re_slash.sub(b",", filename)
# limit length of filename
if len(filename)>200:
filename=filename[:200]
return b",".join((filename, filemd5)).decode('utf-8')
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.items()])
def _parse_cache_control(headers):
retval = {}
if 'cache-control' in headers:
parts = headers['cache-control'].split(',')
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
retval = dict(parts_with_args + parts_wo_args)
return retval
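# Example (illustrative): _parse_cache_control({'cache-control': 'max-age=3600, no-cache'})
# returns {'max-age': '3600', 'no-cache': 1}.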
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on, useful for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0
# In regex below:
# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')
def _parse_www_authenticate(headers, headername='www-authenticate'):
"""Returns a dictionary of dictionaries, one dict
per auth_scheme."""
retval = {}
if headername in headers:
try:
authenticate = headers[headername].strip()
www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
while authenticate:
# Break off the scheme at the beginning of the line
if headername == 'authentication-info':
(auth_scheme, the_rest) = ('digest', authenticate)
else:
(auth_scheme, the_rest) = authenticate.split(" ", 1)
# Now loop over all the key value pairs that come after the scheme,
# being careful not to roll into the next scheme
match = www_auth.search(the_rest)
auth_params = {}
while match:
if match and len(match.groups()) == 3:
(key, value, the_rest) = match.groups()
auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
match = www_auth.search(the_rest)
retval[auth_scheme.lower()] = auth_params
authenticate = the_rest.strip()
except ValueError:
raise MalformedHeader("WWW-Authenticate")
return retval
def _entry_disposition(response_headers, request_headers):
"""Determine freshness from the Date, Expires and Cache-Control headers.
We don't handle the following:
1. Cache-Control: max-stale
2. Age: headers are not used in the calculations.
Note that this algorithm is simpler than you might think
because we are operating as a private (non-shared) cache.
This lets us ignore 's-maxage'. We can also ignore
'proxy-invalidate' since we aren't a proxy.
We will never return a stale document as
fresh as a design decision, and thus the non-implementation
of 'max-stale'. This also lets us safely ignore 'must-revalidate'
since we operate as if every server has sent 'must-revalidate'.
Since we are private we get to ignore both 'public' and
'private' parameters. We also ignore 'no-transform' since
we don't do any transformations.
The 'no-store' parameter is handled at a higher level.
So the only Cache-Control parameters we look at are:
no-cache
only-if-cached
max-age
min-fresh
"""
retval = "STALE"
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if 'pragma' in request_headers and request_headers['pragma'].lower().find('no-cache') != -1:
retval = "TRANSPARENT"
if 'cache-control' not in request_headers:
request_headers['cache-control'] = 'no-cache'
elif 'no-cache' in cc:
retval = "TRANSPARENT"
elif 'no-cache' in cc_response:
retval = "STALE"
elif 'only-if-cached' in cc:
retval = "FRESH"
elif 'date' in response_headers:
date = calendar.timegm(email.utils.parsedate_tz(response_headers['date']))
now = time.time()
current_age = max(0, now - date)
if 'max-age' in cc_response:
try:
freshness_lifetime = int(cc_response['max-age'])
except ValueError:
freshness_lifetime = 0
elif 'expires' in response_headers:
expires = email.utils.parsedate_tz(response_headers['expires'])
if None == expires:
freshness_lifetime = 0
else:
freshness_lifetime = max(0, calendar.timegm(expires) - date)
else:
freshness_lifetime = 0
if 'max-age' in cc:
try:
freshness_lifetime = int(cc['max-age'])
except ValueError:
freshness_lifetime = 0
if 'min-fresh' in cc:
try:
min_fresh = int(cc['min-fresh'])
except ValueError:
min_fresh = 0
current_age += min_fresh
if freshness_lifetime > current_age:
retval = "FRESH"
return retval
def _decompressContent(response, new_content):
content = new_content
try:
encoding = response.get('content-encoding', None)
if encoding in ['gzip', 'deflate']:
if encoding == 'gzip':
content = gzip.GzipFile(fileobj=io.BytesIO(new_content)).read()
if encoding == 'deflate':
content = zlib.decompress(content)
response['content-length'] = str(len(content))
# Record the historical presence of the encoding in a way that won't interfere.
response['-content-encoding'] = response['content-encoding']
del response['content-encoding']
except IOError:
content = ""
raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
return content
def _bind_write_headers(msg):
from email.header import Header
def _write_headers(self):
# Self refers to the Generator object
for h, v in msg.items():
print('%s:' % h, end=' ', file=self._fp)
if isinstance(v, Header):
print(v.encode(maxlinelen=self._maxheaderlen), file=self._fp)
else:
# Header's got lots of smarts, so use it.
header = Header(v, maxlinelen=self._maxheaderlen, charset='utf-8',
header_name=h)
print(header.encode(), file=self._fp)
# A blank line always separates headers from body
print(file=self._fp)
return _write_headers
def _updateCache(request_headers, response_headers, content, cache, cachekey):
if cachekey:
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if 'no-store' in cc or 'no-store' in cc_response:
cache.delete(cachekey)
else:
info = email.message.Message()
for key, value in response_headers.items():
if key not in ['status','content-encoding','transfer-encoding']:
info[key] = value
# Add annotations to the cache to indicate what headers
# are variant for this request.
vary = response_headers.get('vary', None)
if vary:
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
try:
info[key] = request_headers[header]
except KeyError:
pass
status = response_headers.status
if status == 304:
status = 200
status_header = 'status: %d\r\n' % status
try:
header_str = info.as_string()
except UnicodeEncodeError:
setattr(info, '_write_headers', _bind_write_headers(info))
header_str = info.as_string()
header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
text = b"".join([status_header.encode('utf-8'), header_str.encode('utf-8'), content])
cache.set(cachekey, text)
def _cnonce():
dig = _md5(("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).encode('utf-8')).hexdigest()
return dig[:16]
def _wsse_username_token(cnonce, iso_now, password):
return base64.b64encode(_sha(("%s%s%s" % (cnonce, iso_now, password)).encode('utf-8')).digest()).strip()
# For credentials we need two things, first
# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.)
# Then we also need a list of URIs that have already demanded authentication
# That list is tricky since sub-URIs can take the same auth, or the
# auth scheme may change as you descend the tree.
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
class Authentication(object):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
self.path = path
self.host = host
self.credentials = credentials
self.http = http
def depth(self, request_uri):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return request_uri[len(self.path):].count("/")
def inscope(self, host, request_uri):
# XXX Should we normalize the request_uri?
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return (host == self.host) and path.startswith(self.path)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header. Override this in sub-classes."""
pass
def response(self, response, content):
"""Gives us a chance to update with new nonces
or such returned from the last authorized response.
Override this in sub-classes if necessary.
Return TRUE if the request is to be retried, for
example Digest may return stale=true.
"""
return False
def __eq__(self, auth):
return False
def __ne__(self, auth):
return True
def __lt__(self, auth):
return True
def __gt__(self, auth):
return False
def __le__(self, auth):
return True
def __ge__(self, auth):
return False
def __bool__(self):
return True
class BasicAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'Basic ' + base64.b64encode(("%s:%s" % self.credentials).encode('utf-8')).strip().decode('utf-8')
class DigestAuthentication(Authentication):
"""Only do qop='auth' and MD5, since that
is all Apache currently implements"""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['digest']
qop = self.challenge.get('qop', 'auth')
self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
if self.challenge['qop'] is None:
raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
if self.challenge['algorithm'] != 'MD5':
raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
self.challenge['nc'] = 1
def request(self, method, request_uri, headers, content, cnonce = None):
"""Modify the request headers"""
H = lambda x: _md5(x.encode('utf-8')).hexdigest()
KD = lambda s, d: H("%s:%s" % (s, d))
A2 = "".join([method, ":", request_uri])
self.challenge['cnonce'] = cnonce or _cnonce()
request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
self.challenge['nonce'],
'%08x' % self.challenge['nc'],
self.challenge['cnonce'],
self.challenge['qop'], H(A2)))
headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['nonce'],
request_uri,
self.challenge['algorithm'],
request_digest,
self.challenge['qop'],
self.challenge['nc'],
self.challenge['cnonce'])
if self.challenge.get('opaque'):
headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
self.challenge['nc'] += 1
def response(self, response, content):
if 'authentication-info' not in response:
challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
if 'true' == challenge.get('stale'):
self.challenge['nonce'] = challenge['nonce']
self.challenge['nc'] = 1
return True
else:
updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
if 'nextnonce' in updated_challenge:
self.challenge['nonce'] = updated_challenge['nextnonce']
self.challenge['nc'] = 1
return False
class HmacDigestAuthentication(Authentication):
"""Adapted from Robert Sayre's code and DigestAuthentication above."""
__author__ = "Thomas Broyer ([email protected])"
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['hmacdigest']
# TODO: self.challenge['domain']
self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
if self.challenge['reason'] not in ['unauthorized', 'integrity']:
self.challenge['reason'] = 'unauthorized'
self.challenge['salt'] = self.challenge.get('salt', '')
if not self.challenge.get('snonce'):
raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
if self.challenge['algorithm'] == 'HMAC-MD5':
self.hashmod = _md5
else:
self.hashmod = _sha
if self.challenge['pw-algorithm'] == 'MD5':
self.pwhashmod = _md5
else:
self.pwhashmod = _sha
self.key = "".join([self.credentials[0], ":",
self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
":", self.challenge['realm']])
self.key = self.pwhashmod.new(self.key).hexdigest().lower()
def request(self, method, request_uri, headers, content):
"""Modify the request headers"""
keys = _get_end2end_headers(headers)
keylist = "".join(["%s " % k for k in keys])
headers_val = "".join([headers[k] for k in keys])
created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
cnonce = _cnonce()
request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['snonce'],
cnonce,
request_uri,
created,
request_digest,
keylist)
def response(self, response, content):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
if challenge.get('reason') in ['integrity', 'stale']:
return True
return False
class WsseAuthentication(Authentication):
"""This is thinly tested and should not be relied upon.
At this time there isn't any third party server to test against.
Blogger and TypePad implemented this algorithm at one point
but Blogger has since switched to Basic over HTTPS and
TypePad has implemented it wrong, by never issuing a 401
challenge but instead requiring your client to telepathically know that
their endpoint is expecting WSSE profile="UsernameToken"."""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'WSSE profile="UsernameToken"'
iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
cnonce = _cnonce()
password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
self.credentials[0],
password_digest,
cnonce,
iso_now)
class GoogleLoginAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
from urllib.parse import urlencode
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
service = challenge['googlelogin'].get('service', 'xapi')
# Blogger actually returns the service in the challenge
# For the rest we guess based on the URI
if service == 'xapi' and request_uri.find("calendar") > 0:
service = "cl"
# No point in guessing Base or Spreadsheet
#elif request_uri.find("spreadsheets") > 0:
# service = "wise"
auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
lines = content.split('\n')
d = dict([tuple(line.split("=", 1)) for line in lines if line])
if resp.status == 403:
self.Auth = ""
else:
self.Auth = d['Auth']
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
AUTH_SCHEME_CLASSES = {
"basic": BasicAuthentication,
"wsse": WsseAuthentication,
"digest": DigestAuthentication,
"hmacdigest": HmacDigestAuthentication,
"googlelogin": GoogleLoginAuthentication
}
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
class FileCache(object):
"""Uses a local directory as a store for cached files.
Not really safe to use if multiple threads or processes are going to
be running on the same cache.
"""
def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self.cache = cache
self.safe = safe
if not os.path.exists(cache):
os.makedirs(self.cache)
def get(self, key):
retval = None
cacheFullPath = os.path.join(self.cache, self.safe(key))
try:
f = open(cacheFullPath, "rb")
retval = f.read()
f.close()
except IOError:
pass
return retval
def set(self, key, value):
cacheFullPath = os.path.join(self.cache, self.safe(key))
f = open(cacheFullPath, "wb")
f.write(value)
f.close()
def delete(self, key):
cacheFullPath = os.path.join(self.cache, self.safe(key))
if os.path.exists(cacheFullPath):
os.remove(cacheFullPath)
class Credentials(object):
def __init__(self):
self.credentials = []
def add(self, name, password, domain=""):
self.credentials.append((domain.lower(), name, password))
def clear(self):
self.credentials = []
def iter(self, domain):
for (cdomain, name, password) in self.credentials:
if cdomain == "" or domain == cdomain:
yield (name, password)
class KeyCerts(Credentials):
"""Identical to Credentials except that
name/password are mapped to key/cert."""
pass
class ProxyInfo(object):
"""Collect information required to use a proxy."""
def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None):
"""The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
constants. For example:
p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_port=8000)
"""
self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass = proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass
def astuple(self):
return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns,
self.proxy_user, self.proxy_pass)
def isgood(self):
return socks and (self.proxy_host != None) and (self.proxy_port != None)
def proxy_info_from_environment(method='http'):
"""
Read proxy info from the environment variables.
"""
if method not in ('http', 'https'):
return
env_var = method + '_proxy'
url = os.environ.get(env_var, os.environ.get(env_var.upper()))
if not url:
return
return proxy_info_from_url(url, method)
def proxy_info_from_url(url, method='http'):
"""
Construct a ProxyInfo from a URL (such as http_proxy env var)
"""
url = urllib.parse.urlparse(url)
username = None
password = None
port = None
if '@' in url[1]:
ident, host_port = url[1].split('@', 1)
if ':' in ident:
username, password = ident.split(':', 1)
else:
password = ident
else:
host_port = url[1]
if ':' in host_port:
host, port = host_port.split(':', 1)
else:
host = host_port
if port:
port = int(port)
else:
port = dict(https=443, http=80)[method]
proxy_type = 3 # socks.PROXY_TYPE_HTTP
return ProxyInfo(
proxy_type = proxy_type,
proxy_host = host,
proxy_port = port,
proxy_user = username or None,
proxy_pass = password or None,
)
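# Example (illustrative URL): proxy_info_from_url("http://user:[email protected]:3128")
# yields a ProxyInfo with proxy_type=3 (HTTP), proxy_host='proxy.example.com',
# proxy_port=3128, proxy_user='user' and proxy_pass='pw'.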
class HTTPConnectionWithTimeout(http.client.HTTPConnection):
"""HTTPConnection subclass that supports timeouts
HTTPConnection subclass that supports timeouts
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, timeout=None, proxy_info=None):
http.client.HTTPConnection.__init__(self, host, port=port,
timeout=timeout)
self.proxy_info = proxy_info
class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
"""
This class allows communication via SSL.
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
timeout=None, proxy_info=None,
ca_certs=None, disable_ssl_certificate_validation=False):
self.proxy_info = proxy_info
context = None
if ca_certs is None:
ca_certs = CA_CERTS
if (cert_file or ca_certs) and not disable_ssl_certificate_validation:
if not hasattr(ssl, 'SSLContext'):
raise CertificateValidationUnsupportedInPython31()
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
if cert_file:
context.load_cert_chain(cert_file, key_file)
if ca_certs:
context.load_verify_locations(ca_certs)
http.client.HTTPSConnection.__init__(
self, host, port=port, key_file=key_file,
cert_file=cert_file, timeout=timeout, context=context,
check_hostname=True)
SCHEME_TO_CONNECTION = {
'http': HTTPConnectionWithTimeout,
'https': HTTPSConnectionWithTimeout,
}
class Http(object):
"""An HTTP client that handles:
- all methods
- caching
- ETags
- compression,
- HTTPS
- Basic
- Digest
- WSSE
and more.
"""
def __init__(self, cache=None, timeout=None,
proxy_info=proxy_info_from_environment,
ca_certs=None, disable_ssl_certificate_validation=False):
"""If 'cache' is a string then it is used as a directory name for
a disk cache. Otherwise it must be an object that supports the
same interface as FileCache.
All timeouts are in seconds. If None is passed for timeout
then Python's default timeout for sockets will be used. See
for example the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
`proxy_info` may be:
- a callable that takes the http scheme ('http' or 'https') and
returns a ProxyInfo instance per request. By default, uses
proxy_info_from_environment.
- a ProxyInfo instance (static proxy config).
- None (proxy disabled).
ca_certs is the path of a file containing root CA certificates for SSL
server certificate validation. By default, a CA cert file bundled with
httplib2 is used.
If disable_ssl_certificate_validation is true, SSL cert validation will
not be performed.
"""
self.proxy_info = proxy_info
self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
# Map domain name to an httplib connection
self.connections = {}
# The location of the cache, for now a directory
# where cached responses are held.
if cache and isinstance(cache, str):
self.cache = FileCache(cache)
else:
self.cache = cache
# Name/password
self.credentials = Credentials()
# Key/cert
self.certificates = KeyCerts()
# authorization objects
self.authorizations = []
# If set to False then no redirects are followed, even safe ones.
self.follow_redirects = True
# Which HTTP methods do we apply optimistic concurrency to, i.e.
# which methods get an "if-match:" etag header added to them.
self.optimistic_concurrency_methods = ["PUT", "PATCH"]
# If 'follow_redirects' is True, and this is set to True then
# all redirects are followed, including unsafe ones.
self.follow_all_redirects = False
self.ignore_etag = False
self.force_exception_to_status_code = False
self.timeout = timeout
# Keep Authorization: headers on a redirect.
self.forward_authorization_headers = False
def __getstate__(self):
state_dict = copy.copy(self.__dict__)
# In case request is augmented by some foreign object such as
# credentials which handle auth
if 'request' in state_dict:
del state_dict['request']
if 'connections' in state_dict:
del state_dict['connections']
return state_dict
def __setstate__(self, state):
self.__dict__.update(state)
self.connections = {}
def _auth_from_challenge(self, host, request_uri, headers, response, content):
"""A generator that creates Authorization objects
that can be applied to requests.
"""
challenges = _parse_www_authenticate(response, 'www-authenticate')
for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER:
if scheme in challenges:
yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
def add_credentials(self, name, password, domain=""):
"""Add a name and password that will be used
any time a request requires authentication."""
self.credentials.add(name, password, domain)
def add_certificate(self, key, cert, domain):
"""Add a key and cert that will be used
any time a request requires authentication."""
self.certificates.add(key, cert, domain)
def clear_credentials(self):
"""Remove all the names and passwords
that are used for authentication"""
self.credentials.clear()
self.authorizations = []
def _conn_request(self, conn, request_uri, method, body, headers):
for i in range(RETRIES):
try:
if conn.sock is None:
conn.connect()
conn.request(method, request_uri, body, headers)
except socket.timeout:
conn.close()
raise
except socket.gaierror:
conn.close()
raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
except socket.error as e:
errno_ = (e.args[0].errno if isinstance(e.args[0], socket.error) else e.errno)
if errno_ == errno.ECONNREFUSED: # Connection refused
raise
except http.client.HTTPException:
if conn.sock is None:
if i < RETRIES-1:
conn.close()
conn.connect()
continue
else:
conn.close()
raise
if i < RETRIES-1:
conn.close()
conn.connect()
continue
# Just because the server closed the connection doesn't apparently mean
# that the server didn't send a response.
pass
try:
response = conn.getresponse()
except socket.timeout:
raise
except (socket.error, http.client.HTTPException):
conn.close()
if i == 0:
conn.close()
conn.connect()
continue
else:
raise
else:
content = b""
if method == "HEAD":
conn.close()
else:
content = response.read()
response = Response(response)
if method != "HEAD":
content = _decompressContent(response, content)
break
return (response, content)
def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
"""Do the actual request using the connection object
and also follow one level of redirects if necessary"""
auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
auth = auths and sorted(auths)[0][1] or None
if auth:
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
if auth:
if auth.response(response, body):
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers )
response._stale_digest = 1
if response.status == 401:
for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
authorization.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers, )
if response.status != 401:
self.authorizations.append(authorization)
authorization.response(response, body)
break
if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
if redirections:
if 'location' not in response and response.status != 300:
raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
# Fix-up relative redirects (which violate an RFC 2616 MUST)
if 'location' in response:
location = response['location']
(scheme, authority, path, query, fragment) = parse_uri(location)
if authority == None:
response['location'] = urllib.parse.urljoin(absolute_uri, location)
if response.status == 301 and method in ["GET", "HEAD"]:
response['-x-permanent-redirect-url'] = response['location']
if 'content-location' not in response:
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
if 'if-none-match' in headers:
del headers['if-none-match']
if 'if-modified-since' in headers:
del headers['if-modified-since']
if 'authorization' in headers and not self.forward_authorization_headers:
del headers['authorization']
if 'location' in response:
location = response['location']
old_response = copy.deepcopy(response)
if 'content-location' not in old_response:
old_response['content-location'] = absolute_uri
redirect_method = method
if response.status in [302, 303]:
redirect_method = "GET"
body = None
(response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
response.previous = old_response
else:
raise RedirectLimit("Redirected more times than redirection_limit allows.", response, content)
elif response.status in [200, 203] and method in ["GET", "HEAD"]:
# Don't cache 206's since we aren't going to handle byte range requests
if 'content-location' not in response:
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
return (response, content)
def _normalize_headers(self, headers):
return _normalize_headers(headers)
# Need to catch and rebrand some exceptions
# Then need to optionally turn all exceptions into status codes
# including all socket.* and httplib.* exceptions.
def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
""" Performs a single HTTP request.
The 'uri' is the URI of the HTTP resource and can begin
with either 'http' or 'https'. The value of 'uri' must be an absolute URI.
The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.
There is no restriction on the methods allowed.
The 'body' is the entity body to be sent with the request. It is a string
object.
Any extra headers that are to be sent with the request should be provided in the
'headers' dictionary.
The maximum number of redirects to follow before raising an
exception is 'redirections'. The default is 5.
The return value is a tuple of (response, content), the first
being an instance of the 'Response' class, the second being
a string that contains the response entity body.
"""
try:
if headers is None:
headers = {}
else:
headers = self._normalize_headers(headers)
if 'user-agent' not in headers:
headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri)
(scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
domain_port = authority.split(":")[0:2]
if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
scheme = 'https'
authority = domain_port[0]
conn_key = scheme+":"+authority
if conn_key in self.connections:
conn = self.connections[conn_key]
else:
if not connection_type:
connection_type = SCHEME_TO_CONNECTION[scheme]
certs = list(self.certificates.iter(authority))
if issubclass(connection_type, HTTPSConnectionWithTimeout):
if certs:
conn = self.connections[conn_key] = connection_type(
authority, key_file=certs[0][0],
cert_file=certs[0][1], timeout=self.timeout,
proxy_info=self.proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=self.proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=self.proxy_info)
conn.set_debuglevel(debuglevel)
if 'range' not in headers and 'accept-encoding' not in headers:
headers['accept-encoding'] = 'gzip, deflate'
info = email.message.Message()
cached_value = None
if self.cache:
cachekey = defrag_uri
cached_value = self.cache.get(cachekey)
if cached_value:
try:
info, content = cached_value.split(b'\r\n\r\n', 1)
info = email.message_from_bytes(info)
for k, v in info.items():
if v.startswith('=?') and v.endswith('?='):
info.replace_header(k,
str(*email.header.decode_header(v)[0]))
except (IndexError, ValueError):
self.cache.delete(cachekey)
cachekey = None
cached_value = None
else:
cachekey = None
if method in self.optimistic_concurrency_methods and self.cache and 'etag' in info and not self.ignore_etag and 'if-match' not in headers:
# http://www.w3.org/1999/04/Editing/
headers['if-match'] = info['etag']
if method not in ["GET", "HEAD"] and self.cache and cachekey:
# RFC 2616 Section 13.10
self.cache.delete(cachekey)
# Check the vary header in the cache to see if this request
# matches what varies in the cache.
if method in ['GET', 'HEAD'] and 'vary' in info:
vary = info['vary']
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
value = info[key]
if headers.get(header, None) != value:
cached_value = None
break
if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
if '-x-permanent-redirect-url' in info:
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0:
raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "")
(response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1)
response.previous = Response(info)
response.previous.fromcache = True
else:
# Determine our course of action:
# Is the cached entry fresh or stale?
# Has the client requested a non-cached response?
#
# There seem to be three possible answers:
# 1. [FRESH] Return the cache entry w/o doing a GET
# 2. [STALE] Do the GET (but add in cache validators if available)
# 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
entry_disposition = _entry_disposition(info, headers)
if entry_disposition == "FRESH":
if not cached_value:
info['status'] = '504'
content = b""
response = Response(info)
if cached_value:
response.fromcache = True
return (response, content)
if entry_disposition == "STALE":
if 'etag' in info and not self.ignore_etag and not 'if-none-match' in headers:
headers['if-none-match'] = info['etag']
if 'last-modified' in info and not 'last-modified' in headers:
headers['if-modified-since'] = info['last-modified']
elif entry_disposition == "TRANSPARENT":
pass
(response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
if response.status == 304 and method == "GET":
# Rewrite the cache entry with the new end-to-end headers
# Take all headers that are in response
# and overwrite their values in info.
# unless they are hop-by-hop, or are listed in the connection header.
for key in _get_end2end_headers(response):
info[key] = response[key]
merged_response = Response(info)
if hasattr(response, "_stale_digest"):
merged_response._stale_digest = response._stale_digest
_updateCache(headers, merged_response, content, self.cache, cachekey)
response = merged_response
response.status = 200
response.fromcache = True
elif response.status == 200:
content = new_content
else:
self.cache.delete(cachekey)
content = new_content
else:
cc = _parse_cache_control(headers)
if 'only-if-cached' in cc:
info['status'] = '504'
response = Response(info)
content = b""
else:
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
except Exception as e:
if self.force_exception_to_status_code:
if isinstance(e, HttpLib2ErrorWithResponse):
response = e.response
content = e.content
response.status = 500
response.reason = str(e)
elif isinstance(e, socket.timeout):
content = b"Request Timeout"
response = Response({
"content-type": "text/plain",
"status": "408",
"content-length": len(content)
})
response.reason = "Request Timeout"
else:
content = str(e).encode('utf-8')
response = Response({
"content-type": "text/plain",
"status": "400",
"content-length": len(content)
})
response.reason = "Bad Request"
else:
raise
return (response, content)
class Response(dict):
"""An object more like email.message than httplib.HTTPResponse."""
"""Is this response from our local cache"""
fromcache = False
"""HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
version = 11
"Status code returned by server. "
status = 200
"""Reason phrase returned by server."""
reason = "Ok"
previous = None
def __init__(self, info):
# info is either an email.message or
# an httplib.HTTPResponse object.
if isinstance(info, http.client.HTTPResponse):
for key, value in info.getheaders():
key = key.lower()
prev = self.get(key)
if prev is not None:
value = ', '.join((prev, value))
self[key] = value
self.status = info.status
self['status'] = str(self.status)
self.reason = info.reason
self.version = info.version
elif isinstance(info, email.message.Message):
for key, value in list(info.items()):
self[key.lower()] = value
self.status = int(self['status'])
else:
for key, value in info.items():
self[key.lower()] = value
self.status = int(self.get('status', self.status))
def __getattr__(self, name):
if name == 'dict':
return self
else:
raise AttributeError(name)
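# Illustrative usage sketch (not part of the library): exercises the
# (response, content) flow documented in Http.request above. The URL and the
# cache directory name are placeholders, not values taken from this module.
def _example_http_request():
    h = Http(cache=".httplib2_cache", timeout=30)
    response, content = h.request("http://example.org/", "GET",
                                  headers={"cache-control": "no-cache"})
    # Response acts like a dict of lower-cased headers plus status attributes.
    return response.status, response.fromcache, len(content)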
|
mit
|
MostafaGazar/tensorflow
|
tensorflow/python/kernel_tests/matrix_inverse_op_test.py
|
15
|
3457
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops.matrix_inverse."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class InverseOpTest(tf.test.TestCase):
def _verifyInverse(self, x):
for np_type in [np.float32, np.float64]:
for adjoint in False, True:
y = x.astype(np_type)
with self.test_session():
# Verify that x^{-1} * x == Identity matrix.
inv = tf.matrix_inverse(y, adjoint=adjoint)
tf_ans = tf.batch_matmul(inv, y, adj_y=adjoint)
np_ans = np.identity(y.shape[-1])
if x.ndim > 2:
tiling = list(y.shape)
tiling[-2:] = [1, 1]
np_ans = np.tile(np_ans, tiling)
out = tf_ans.eval()
self.assertAllClose(np_ans, out)
self.assertShapeEqual(y, tf_ans)
def testNonsymmetric(self):
# 2x2 matrices
matrix1 = np.array([[1., 2.], [3., 4.]])
matrix2 = np.array([[1., 3.], [3., 5.]])
self._verifyInverse(matrix1)
self._verifyInverse(matrix2)
# A multidimensional batch of 2x2 matrices
matrix_batch = np.concatenate([np.expand_dims(matrix1, 0),
np.expand_dims(matrix2, 0)])
matrix_batch = np.tile(matrix_batch, [2, 3, 1, 1])
self._verifyInverse(matrix_batch)
def testSymmetricPositiveDefinite(self):
# 2x2 matrices
matrix1 = np.array([[2., 1.], [1., 2.]])
matrix2 = np.array([[3., -1.], [-1., 3.]])
self._verifyInverse(matrix1)
self._verifyInverse(matrix2)
# A multidimensional batch of 2x2 matrices
matrix_batch = np.concatenate([np.expand_dims(matrix1, 0), np.expand_dims(
matrix2, 0)])
matrix_batch = np.tile(matrix_batch, [2, 3, 1, 1])
self._verifyInverse(matrix_batch)
def testNonSquareMatrix(self):
# When the inverse of a non-square matrix is attempted we should raise
# an error
with self.assertRaises(ValueError):
tf.matrix_inverse(np.array([[1., 2., 3.], [3., 4., 5.]]))
def testWrongDimensions(self):
# The input to the inverse should be at least a 2-dimensional tensor.
tensor3 = tf.constant([1., 2.])
with self.assertRaises(ValueError):
tf.matrix_inverse(tensor3)
def testNotInvertible(self):
# The input should be invertible.
with self.test_session():
with self.assertRaisesOpError("Input is not invertible."):
# All rows of the matrix below add to zero.
tensor3 = tf.constant([[1., 0., -1.], [-1., 1., 0.], [0., -1., 1.]])
tf.matrix_inverse(tensor3).eval()
def testEmpty(self):
self._verifyInverse(np.empty([0, 2, 2]))
self._verifyInverse(np.empty([2, 0, 0]))
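# Illustrative sketch (not part of the test suite): direct use of
# tf.matrix_inverse outside the TestCase machinery, under the same
# TensorFlow 0.x/1.x-era API this file targets. The sample matrix is a
# placeholder.
def _example_matrix_inverse():
  a = np.array([[4., 7.], [2., 6.]], dtype=np.float32)
  with tf.Session() as sess:
    inv = sess.run(tf.matrix_inverse(a))
  # a times its inverse should be close to the 2x2 identity matrix.
  return np.allclose(np.dot(a, inv), np.eye(2), atol=1e-5)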
if __name__ == "__main__":
tf.test.main()
|
apache-2.0
|
flewrain/flewrain-dolphin
|
Externals/scons-local/scons-local-2.0.1/SCons/Tool/qt.py
|
61
|
13252
|
"""SCons.Tool.qt
Tool-specific initialization for Qt.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/qt.py 5134 2010/08/16 23:02:40 bdeegan"
import os.path
import re
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Scanner
import SCons.Tool
import SCons.Util
class ToolQtWarning(SCons.Warnings.Warning):
pass
class GeneratedMocFileNotIncluded(ToolQtWarning):
pass
class QtdirNotFound(ToolQtWarning):
pass
SCons.Warnings.enableWarningClass(ToolQtWarning)
header_extensions = [".h", ".hxx", ".hpp", ".hh"]
if SCons.Util.case_sensitive_suffixes('.h', '.H'):
header_extensions.append('.H')
cplusplus = __import__('c++', globals(), locals(), [])
cxx_suffixes = cplusplus.CXXSuffixes
def checkMocIncluded(target, source, env):
moc = target[0]
cpp = source[0]
# looks like cpp.includes is cleared before the build stage :-(
# not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/
path = SCons.Defaults.CScan.path(env, moc.cwd)
includes = SCons.Defaults.CScan(cpp, env, path)
if not moc in includes:
SCons.Warnings.warn(
GeneratedMocFileNotIncluded,
"Generated moc file '%s' is not included by '%s'" %
(str(moc), str(cpp)))
def find_file(filename, paths, node_factory):
for dir in paths:
node = node_factory(filename, dir)
if node.rexists():
return node
return None
class _Automoc(object):
"""
Callable class, which works as an emitter for Programs, SharedLibraries and
StaticLibraries.
"""
def __init__(self, objBuilderName):
self.objBuilderName = objBuilderName
def __call__(self, target, source, env):
"""
Smart autoscan function. Gets the list of objects for the Program
or Lib. Adds objects and builders for the special qt files.
"""
try:
if int(env.subst('$QT_AUTOSCAN')) == 0:
return target, source
except ValueError:
pass
try:
debug = int(env.subst('$QT_DEBUG'))
except ValueError:
debug = 0
# some shortcuts used in the scanner
splitext = SCons.Util.splitext
objBuilder = getattr(env, self.objBuilderName)
# some regular expressions:
# Q_OBJECT detection
q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]')
# cxx and c comment 'eater'
#comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)')
# CW: something must be wrong with the regexp. See also bug #998222
# CURRENTLY THERE IS NO TEST CASE FOR THAT
# The following is kind of hacky to get builders working properly (FIXME)
objBuilderEnv = objBuilder.env
objBuilder.env = env
mocBuilderEnv = env.Moc.env
env.Moc.env = env
# make a deep copy for the result; MocH objects will be appended
out_sources = source[:]
for obj in source:
if not obj.has_builder():
# binary obj file provided
if debug:
print "scons: qt: '%s' seems to be a binary. Discarded." % str(obj)
continue
cpp = obj.sources[0]
if not splitext(str(cpp))[1] in cxx_suffixes:
if debug:
print "scons: qt: '%s' is no cxx file. Discarded." % str(cpp)
# c or fortran source
continue
#cpp_contents = comment.sub('', cpp.get_text_contents())
cpp_contents = cpp.get_text_contents()
h=None
for h_ext in header_extensions:
# try to find the header file in the corresponding source
# directory
hname = splitext(cpp.name)[0] + h_ext
h = find_file(hname, (cpp.get_dir(),), env.File)
if h:
if debug:
print "scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp))
#h_contents = comment.sub('', h.get_text_contents())
h_contents = h.get_text_contents()
break
if not h and debug:
print "scons: qt: no header for '%s'." % (str(cpp))
if h and q_object_search.search(h_contents):
# h file with the Q_OBJECT macro found -> add moc_cpp
moc_cpp = env.Moc(h)
moc_o = objBuilder(moc_cpp)
out_sources.append(moc_o)
#moc_cpp.target_scanner = SCons.Defaults.CScan
if debug:
print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))
if cpp and q_object_search.search(cpp_contents):
# cpp file with Q_OBJECT macro found -> add moc
# (to be included in cpp)
moc = env.Moc(cpp)
env.Ignore(moc, moc)
if debug:
print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))
#moc.source_scanner = SCons.Defaults.CScan
# restore the original env attributes (FIXME)
objBuilder.env = objBuilderEnv
env.Moc.env = mocBuilderEnv
return (target, out_sources)
AutomocShared = _Automoc('SharedObject')
AutomocStatic = _Automoc('StaticObject')
def _detect(env):
"""Not really safe, but fast method to detect the QT library"""
QTDIR = None
if not QTDIR:
QTDIR = env.get('QTDIR',None)
if not QTDIR:
QTDIR = os.environ.get('QTDIR',None)
if not QTDIR:
moc = env.WhereIs('moc')
if moc:
QTDIR = os.path.dirname(os.path.dirname(moc))
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR)
else:
QTDIR = None
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using empty QTDIR")
return QTDIR
def uicEmitter(target, source, env):
adjustixes = SCons.Util.adjustixes
bs = SCons.Util.splitext(str(source[0].name))[0]
bs = os.path.join(str(target[0].get_dir()),bs)
# first target (header) is automatically added by builder
if len(target) < 2:
# second target is implementation
target.append(adjustixes(bs,
env.subst('$QT_UICIMPLPREFIX'),
env.subst('$QT_UICIMPLSUFFIX')))
if len(target) < 3:
# third target is moc file
target.append(adjustixes(bs,
env.subst('$QT_MOCHPREFIX'),
env.subst('$QT_MOCHSUFFIX')))
return target, source
def uicScannerFunc(node, env, path):
lookout = []
lookout.extend(env['CPPPATH'])
lookout.append(str(node.rfile().dir))
includes = re.findall("<include.*?>(.*?)</include>", node.get_text_contents())
result = []
for incFile in includes:
dep = env.FindFile(incFile,lookout)
if dep:
result.append(dep)
return result
uicScanner = SCons.Scanner.Base(uicScannerFunc,
name = "UicScanner",
node_class = SCons.Node.FS.File,
node_factory = SCons.Node.FS.File,
recursive = 0)
def generate(env):
"""Add Builders and construction variables for qt to an Environment."""
CLVar = SCons.Util.CLVar
Action = SCons.Action.Action
Builder = SCons.Builder.Builder
env.SetDefault(QTDIR = _detect(env),
QT_BINPATH = os.path.join('$QTDIR', 'bin'),
QT_CPPPATH = os.path.join('$QTDIR', 'include'),
QT_LIBPATH = os.path.join('$QTDIR', 'lib'),
QT_MOC = os.path.join('$QT_BINPATH','moc'),
QT_UIC = os.path.join('$QT_BINPATH','uic'),
QT_LIB = 'qt', # may be set to qt-mt
QT_AUTOSCAN = 1, # scan for moc'able sources
# Some Qt-specific flags. I don't expect anyone will want to
# manipulate those ...
QT_UICIMPLFLAGS = CLVar(''),
QT_UICDECLFLAGS = CLVar(''),
QT_MOCFROMHFLAGS = CLVar(''),
QT_MOCFROMCXXFLAGS = CLVar('-i'),
# suffixes/prefixes for the headers / sources to generate
QT_UICDECLPREFIX = '',
QT_UICDECLSUFFIX = '.h',
QT_UICIMPLPREFIX = 'uic_',
QT_UICIMPLSUFFIX = '$CXXFILESUFFIX',
QT_MOCHPREFIX = 'moc_',
QT_MOCHSUFFIX = '$CXXFILESUFFIX',
QT_MOCCXXPREFIX = '',
QT_MOCCXXSUFFIX = '.moc',
QT_UISUFFIX = '.ui',
# Commands for the qt support ...
# command to generate header, implementation and moc-file
# from a .ui file
QT_UICCOM = [
CLVar('$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'),
CLVar('$QT_UIC $QT_UICIMPLFLAGS -impl ${TARGETS[0].file} '
'-o ${TARGETS[1]} $SOURCE'),
CLVar('$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')],
# command to generate meta object information for a class
# declared in a header
QT_MOCFROMHCOM = (
'$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'),
# command to generate meta object information for a class
# declared in a cpp file
QT_MOCFROMCXXCOM = [
CLVar('$QT_MOC $QT_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'),
Action(checkMocIncluded,None)])
# ... and the corresponding builders
uicBld = Builder(action=SCons.Action.Action('$QT_UICCOM', '$QT_UICCOMSTR'),
emitter=uicEmitter,
src_suffix='$QT_UISUFFIX',
suffix='$QT_UICDECLSUFFIX',
prefix='$QT_UICDECLPREFIX',
source_scanner=uicScanner)
mocBld = Builder(action={}, prefix={}, suffix={})
for h in header_extensions:
act = SCons.Action.Action('$QT_MOCFROMHCOM', '$QT_MOCFROMHCOMSTR')
mocBld.add_action(h, act)
mocBld.prefix[h] = '$QT_MOCHPREFIX'
mocBld.suffix[h] = '$QT_MOCHSUFFIX'
for cxx in cxx_suffixes:
act = SCons.Action.Action('$QT_MOCFROMCXXCOM', '$QT_MOCFROMCXXCOMSTR')
mocBld.add_action(cxx, act)
mocBld.prefix[cxx] = '$QT_MOCCXXPREFIX'
mocBld.suffix[cxx] = '$QT_MOCCXXSUFFIX'
# register the builders
env['BUILDERS']['Uic'] = uicBld
env['BUILDERS']['Moc'] = mocBld
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
static_obj.add_src_builder('Uic')
shared_obj.add_src_builder('Uic')
# We use the emitters of Program / StaticLibrary / SharedLibrary
# to scan for moc'able files
# We can't refer to the builders directly, we have to fetch them
# as Environment attributes because that sets them up to be called
# correctly later by our emitter.
env.AppendUnique(PROGEMITTER =[AutomocStatic],
SHLIBEMITTER=[AutomocShared],
LIBEMITTER =[AutomocStatic],
# Of course, we need to link against the qt libraries
CPPPATH=["$QT_CPPPATH"],
LIBPATH=["$QT_LIBPATH"],
LIBS=['$QT_LIB'])
def exists(env):
return _detect(env)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
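# Illustrative SConstruct sketch (an assumption, not part of this tool
# module): a project would typically request the tool by name and let the
# Automoc emitters above handle moc/uic generation. 'myapp' and the source
# list are placeholders.
#
#   env = Environment(tools=['default', 'qt'], QTDIR='/usr/lib/qt3')
#   env.Program('myapp', ['main.cpp', 'mainwindow.cpp'])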
|
gpl-2.0
|
dga4654dan/UTM-Demo
|
V_1_0_1/UtmDemo_Sfs_2.9.0/UtmDemo_Sfs_2.9.0_Server/lib/Lib/encodings/mac_greek.py
|
9
|
6699
|
""" Python Character Mapping Codec generated from 'GREEK.TXT' with gencodec.py.
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x0081: 0x00b9, # SUPERSCRIPT ONE
0x0082: 0x00b2, # SUPERSCRIPT TWO
0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0084: 0x00b3, # SUPERSCRIPT THREE
0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x0087: 0x0385, # GREEK DIALYTIKA TONOS
0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x008b: 0x0384, # GREEK TONOS
0x008c: 0x00a8, # DIAERESIS
0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x0092: 0x00a3, # POUND SIGN
0x0093: 0x2122, # TRADE MARK SIGN
0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
0x0096: 0x2022, # BULLET
0x0097: 0x00bd, # VULGAR FRACTION ONE HALF
0x0098: 0x2030, # PER MILLE SIGN
0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x009b: 0x00a6, # BROKEN BAR
0x009c: 0x00ad, # SOFT HYPHEN
0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x00a0: 0x2020, # DAGGER
0x00a1: 0x0393, # GREEK CAPITAL LETTER GAMMA
0x00a2: 0x0394, # GREEK CAPITAL LETTER DELTA
0x00a3: 0x0398, # GREEK CAPITAL LETTER THETA
0x00a4: 0x039b, # GREEK CAPITAL LETTER LAMBDA
0x00a5: 0x039e, # GREEK CAPITAL LETTER XI
0x00a6: 0x03a0, # GREEK CAPITAL LETTER PI
0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
0x00a8: 0x00ae, # REGISTERED SIGN
0x00aa: 0x03a3, # GREEK CAPITAL LETTER SIGMA
0x00ab: 0x03aa, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
0x00ac: 0x00a7, # SECTION SIGN
0x00ad: 0x2260, # NOT EQUAL TO
0x00ae: 0x00b0, # DEGREE SIGN
0x00af: 0x0387, # GREEK ANO TELEIA
0x00b0: 0x0391, # GREEK CAPITAL LETTER ALPHA
0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
0x00b4: 0x00a5, # YEN SIGN
0x00b5: 0x0392, # GREEK CAPITAL LETTER BETA
0x00b6: 0x0395, # GREEK CAPITAL LETTER EPSILON
0x00b7: 0x0396, # GREEK CAPITAL LETTER ZETA
0x00b8: 0x0397, # GREEK CAPITAL LETTER ETA
0x00b9: 0x0399, # GREEK CAPITAL LETTER IOTA
0x00ba: 0x039a, # GREEK CAPITAL LETTER KAPPA
0x00bb: 0x039c, # GREEK CAPITAL LETTER MU
0x00bc: 0x03a6, # GREEK CAPITAL LETTER PHI
0x00bd: 0x03ab, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
0x00be: 0x03a8, # GREEK CAPITAL LETTER PSI
0x00bf: 0x03a9, # GREEK CAPITAL LETTER OMEGA
0x00c0: 0x03ac, # GREEK SMALL LETTER ALPHA WITH TONOS
0x00c1: 0x039d, # GREEK CAPITAL LETTER NU
0x00c2: 0x00ac, # NOT SIGN
0x00c3: 0x039f, # GREEK CAPITAL LETTER OMICRON
0x00c4: 0x03a1, # GREEK CAPITAL LETTER RHO
0x00c5: 0x2248, # ALMOST EQUAL TO
0x00c6: 0x03a4, # GREEK CAPITAL LETTER TAU
0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
0x00ca: 0x00a0, # NO-BREAK SPACE
0x00cb: 0x03a5, # GREEK CAPITAL LETTER UPSILON
0x00cc: 0x03a7, # GREEK CAPITAL LETTER CHI
0x00cd: 0x0386, # GREEK CAPITAL LETTER ALPHA WITH TONOS
0x00ce: 0x0388, # GREEK CAPITAL LETTER EPSILON WITH TONOS
0x00cf: 0x0153, # LATIN SMALL LIGATURE OE
0x00d0: 0x2013, # EN DASH
0x00d1: 0x2015, # HORIZONTAL BAR
0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
0x00d6: 0x00f7, # DIVISION SIGN
0x00d7: 0x0389, # GREEK CAPITAL LETTER ETA WITH TONOS
0x00d8: 0x038a, # GREEK CAPITAL LETTER IOTA WITH TONOS
0x00d9: 0x038c, # GREEK CAPITAL LETTER OMICRON WITH TONOS
0x00da: 0x038e, # GREEK CAPITAL LETTER UPSILON WITH TONOS
0x00db: 0x03ad, # GREEK SMALL LETTER EPSILON WITH TONOS
0x00dc: 0x03ae, # GREEK SMALL LETTER ETA WITH TONOS
0x00dd: 0x03af, # GREEK SMALL LETTER IOTA WITH TONOS
0x00de: 0x03cc, # GREEK SMALL LETTER OMICRON WITH TONOS
0x00df: 0x038f, # GREEK CAPITAL LETTER OMEGA WITH TONOS
0x00e0: 0x03cd, # GREEK SMALL LETTER UPSILON WITH TONOS
0x00e1: 0x03b1, # GREEK SMALL LETTER ALPHA
0x00e2: 0x03b2, # GREEK SMALL LETTER BETA
0x00e3: 0x03c8, # GREEK SMALL LETTER PSI
0x00e4: 0x03b4, # GREEK SMALL LETTER DELTA
0x00e5: 0x03b5, # GREEK SMALL LETTER EPSILON
0x00e6: 0x03c6, # GREEK SMALL LETTER PHI
0x00e7: 0x03b3, # GREEK SMALL LETTER GAMMA
0x00e8: 0x03b7, # GREEK SMALL LETTER ETA
0x00e9: 0x03b9, # GREEK SMALL LETTER IOTA
0x00ea: 0x03be, # GREEK SMALL LETTER XI
0x00eb: 0x03ba, # GREEK SMALL LETTER KAPPA
0x00ec: 0x03bb, # GREEK SMALL LETTER LAMBDA
0x00ed: 0x03bc, # GREEK SMALL LETTER MU
0x00ee: 0x03bd, # GREEK SMALL LETTER NU
0x00ef: 0x03bf, # GREEK SMALL LETTER OMICRON
0x00f0: 0x03c0, # GREEK SMALL LETTER PI
0x00f1: 0x03ce, # GREEK SMALL LETTER OMEGA WITH TONOS
0x00f2: 0x03c1, # GREEK SMALL LETTER RHO
0x00f3: 0x03c3, # GREEK SMALL LETTER SIGMA
0x00f4: 0x03c4, # GREEK SMALL LETTER TAU
0x00f5: 0x03b8, # GREEK SMALL LETTER THETA
0x00f6: 0x03c9, # GREEK SMALL LETTER OMEGA
0x00f7: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA
0x00f8: 0x03c7, # GREEK SMALL LETTER CHI
0x00f9: 0x03c5, # GREEK SMALL LETTER UPSILON
0x00fa: 0x03b6, # GREEK SMALL LETTER ZETA
0x00fb: 0x03ca, # GREEK SMALL LETTER IOTA WITH DIALYTIKA
0x00fc: 0x03cb, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA
0x00fd: 0x0390, # GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
0x00fe: 0x03b0, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
0x00ff: None, # UNDEFINED
})
### Encoding Map
encoding_map = codecs.make_encoding_map(decoding_map)
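# Illustrative sketch (not part of the generated codec): round-trips a few
# Greek characters through the Codec class above. The sample string is a
# placeholder.
def _example_roundtrip():
    sample = u'\u0391\u03b2\u03b3'  # GREEK CAPITAL ALPHA, small beta, small gamma
    encoded, _ = Codec().encode(sample)
    decoded, _ = Codec().decode(encoded)
    return decoded == sample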
|
gpl-2.0
|
hdinsight/hue
|
desktop/core/ext-py/tablib-0.10.0/tablib/packages/odf/script.py
|
100
|
1104
|
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from namespaces import SCRIPTNS
from element import Element
# ODF 1.0 section 12.4.1
# The <script:event-listener> element binds an event to a macro.
# Autogenerated
def EventListener(**args):
return Element(qname = (SCRIPTNS,'event-listener'), **args)
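# Illustrative sketch (an assumption, not part of the module): builds a bare
# <script:event-listener> element via the factory above; a real document
# would also set the event-name/macro attributes described in ODF 1.0
# section 12.4.1.
def _example_event_listener():
    return EventListener()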
|
apache-2.0
|
hpproliant/ironic
|
ironic/common/keystone.py
|
4
|
5539
|
# coding=utf-8
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import exceptions as ksexception
from oslo_concurrency import lockutils
from oslo_config import cfg
from six.moves.urllib import parse
from ironic.common import exception
from ironic.common.i18n import _
CONF = cfg.CONF
keystone_opts = [
cfg.StrOpt('region_name',
help=_('The region used for getting endpoints of OpenStack'
' services.')),
]
CONF.register_opts(keystone_opts, group='keystone')
CONF.import_group('keystone_authtoken', 'keystonemiddleware.auth_token')
_KS_CLIENT = None
def _is_apiv3(auth_url, auth_version):
"""Checks if V3 version of API is being used or not.
This method inspects auth_url and auth_version, and checks whether V3
version of the API is being used or not.
:param auth_url: a http or https url to be inspected (like
'http://127.0.0.1:9898/').
:param auth_version: a string containing the version (like 'v2', 'v3.0')
:returns: True if V3 of the API is being used.
"""
return auth_version == 'v3.0' or '/v3' in parse.urlparse(auth_url).path
def _get_ksclient(token=None):
auth_url = CONF.keystone_authtoken.auth_uri
if not auth_url:
raise exception.KeystoneFailure(_('Keystone API endpoint is missing'))
auth_version = CONF.keystone_authtoken.auth_version
api_v3 = _is_apiv3(auth_url, auth_version)
if api_v3:
from keystoneclient.v3 import client
else:
from keystoneclient.v2_0 import client
auth_url = get_keystone_url(auth_url, auth_version)
try:
if token:
return client.Client(token=token, auth_url=auth_url)
else:
params = {'username': CONF.keystone_authtoken.admin_user,
'password': CONF.keystone_authtoken.admin_password,
'tenant_name': CONF.keystone_authtoken.admin_tenant_name,
'region_name': CONF.keystone.region_name,
'auth_url': auth_url}
return _get_ksclient_from_conf(client, **params)
except ksexception.Unauthorized:
raise exception.KeystoneUnauthorized()
except ksexception.AuthorizationFailure as err:
raise exception.KeystoneFailure(_('Could not authorize in Keystone:'
' %s') % err)
@lockutils.synchronized('keystone_client', 'ironic-')
def _get_ksclient_from_conf(client, **params):
global _KS_CLIENT
# NOTE(yuriyz): use Keystone client default gap, to determine whether the
# given token is about to expire
if _KS_CLIENT is None or _KS_CLIENT.auth_ref.will_expire_soon():
_KS_CLIENT = client.Client(**params)
return _KS_CLIENT
def get_keystone_url(auth_url, auth_version):
"""Gives an http/https url to contact keystone.
Given an auth_url and auth_version, this method generates the url in
which keystone can be reached.
:param auth_url: a http or https url to be inspected (like
'http://127.0.0.1:9898/').
:param auth_version: a string containing the version (like v2, v3.0, etc)
:returns: a string containing the keystone url
"""
api_v3 = _is_apiv3(auth_url, auth_version)
api_version = 'v3' if api_v3 else 'v2.0'
# NOTE(lucasagomes): Get rid of the trailing '/' otherwise urljoin()
# fails to override the version in the URL
return parse.urljoin(auth_url.rstrip('/'), api_version)
def get_service_url(service_type='baremetal', endpoint_type='internal'):
"""Wrapper for get service url from keystone service catalog.
Given a service_type and an endpoint_type, this method queries keystone
service catalog and provides the url for the desired endpoint.
:param service_type: the keystone service for which url is required.
:param endpoint_type: the type of endpoint for the service.
:returns: an http/https url for the desired endpoint.
"""
ksclient = _get_ksclient()
if not ksclient.has_service_catalog():
raise exception.KeystoneFailure(_('No Keystone service catalog '
'loaded'))
try:
endpoint = ksclient.service_catalog.url_for(
service_type=service_type,
endpoint_type=endpoint_type,
region_name=CONF.keystone.region_name)
except ksexception.EndpointNotFound:
raise exception.CatalogNotFound(service_type=service_type,
endpoint_type=endpoint_type)
return endpoint
def get_admin_auth_token():
"""Get an admin auth_token from the Keystone."""
ksclient = _get_ksclient()
return ksclient.auth_token
def token_expires_soon(token, duration=None):
"""Determines if token expiration is about to occur.
:param duration: time interval in seconds
:returns: boolean : true if expiration is within the given duration
"""
ksclient = _get_ksclient(token=token)
return ksclient.auth_ref.will_expire_soon(stale_duration=duration)
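# Illustrative sketch (not part of the module): how callers typically combine
# the helpers above to reach another OpenStack service. A populated
# [keystone_authtoken] configuration section is assumed.
def _example_service_lookup():
    token = get_admin_auth_token()
    url = get_service_url(service_type='baremetal', endpoint_type='internal')
    return token, url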
|
apache-2.0
|
ccn-2m/django
|
tests/regressiontests/modeladmin/models.py
|
60
|
1514
|
# coding: utf-8
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Band(models.Model):
name = models.CharField(max_length=100)
bio = models.TextField()
sign_date = models.DateField()
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class Concert(models.Model):
main_band = models.ForeignKey(Band, related_name='main_concerts')
opening_band = models.ForeignKey(Band, related_name='opening_concerts',
blank=True)
day = models.CharField(max_length=3, choices=((1, 'Fri'), (2, 'Sat')))
transport = models.CharField(max_length=100, choices=(
(1, 'Plane'),
(2, 'Train'),
(3, 'Bus')
), blank=True)
class ValidationTestModel(models.Model):
name = models.CharField(max_length=100)
slug = models.SlugField()
users = models.ManyToManyField(User)
state = models.CharField(max_length=2, choices=(("CO", "Colorado"), ("WA", "Washington")))
is_active = models.BooleanField()
pub_date = models.DateTimeField()
band = models.ForeignKey(Band)
no = models.IntegerField(verbose_name="Number", blank=True, null=True) # This field is intentionally 2 characters long. See #16080.
def decade_published_in(self):
return self.pub_date.strftime('%Y')[:3] + "0's"
class ValidationTestInlineModel(models.Model):
parent = models.ForeignKey(ValidationTestModel)
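# Illustrative sketch (an assumption, not part of these regression-test
# models): a minimal ModelAdmin of the kind the modeladmin tests validate.
# It is intentionally not registered with any admin site here.
from django.contrib import admin
class BandAdmin(admin.ModelAdmin):
    list_display = ('name', 'sign_date')
    ordering = ('name',)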
|
bsd-3-clause
|
azaghal/ansible
|
lib/ansible/plugins/lookup/vars.py
|
9
|
3230
|
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: vars
author: Ansible Core
version_added: "2.5"
short_description: Lookup templated value of variables
description:
- 'Retrieves the value of an Ansible variable. Note: Only returns top level variable names.'
options:
_terms:
description: The variable names to look up.
required: True
default:
description:
- What to return if a variable is undefined.
- If no default is set, it will result in an error if any of the variables is undefined.
"""
EXAMPLES = """
- name: Show value of 'variablename'
debug: msg="{{ lookup('vars', 'variabl' + myvar) }}"
vars:
variablename: hello
myvar: ename
- name: Show default empty since I don't have 'variablnotename'
debug: msg="{{ lookup('vars', 'variabl' + myvar, default='')}}"
vars:
variablename: hello
myvar: notename
- name: Produce an error since I don't have 'variablnotename'
debug: msg="{{ lookup('vars', 'variabl' + myvar)}}"
ignore_errors: True
vars:
variablename: hello
myvar: notename
- name: find several related variables
debug: msg="{{ lookup('vars', 'ansible_play_hosts', 'ansible_play_batch', 'ansible_play_hosts_all') }}"
- name: Access nested variables
debug: msg="{{ lookup('vars', 'variabl' + myvar).sub_var }}"
ignore_errors: True
vars:
variablename:
sub_var: 12
myvar: ename
- name: alternate way to find some 'prefixed vars' in loop
debug: msg="{{ lookup('vars', 'ansible_play_' + item) }}"
loop:
- hosts
- batch
- hosts_all
"""
RETURN = """
_value:
description:
- value of the variables requested.
"""
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
if variables is not None:
self._templar.available_variables = variables
myvars = getattr(self._templar, '_available_variables', {})
self.set_options(direct=kwargs)
default = self.get_option('default')
ret = []
for term in terms:
if not isinstance(term, string_types):
raise AnsibleError('Invalid setting identifier, "%s" is not a string, it is a %s' % (term, type(term)))
try:
try:
value = myvars[term]
except KeyError:
try:
value = myvars['hostvars'][myvars['inventory_hostname']][term]
except KeyError:
raise AnsibleUndefinedVariable('No variable found with this name: %s' % term)
ret.append(self._templar.template(value, fail_on_undefined=True))
except AnsibleUndefinedVariable:
if default is not None:
ret.append(default)
else:
raise
return ret
|
gpl-3.0
|
makinux/PyCsmap
|
CSMapMake.py
|
1
|
2664
|
# -*- coding: utf-8 -*
from PIL import Image
import os
import argparse
import requests
import math
from CSMap import CSMap
parser = argparse.ArgumentParser(description='MyScript')
parser.add_argument('images_x_start', type=int)
parser.add_argument('images_x_end', type=int)
parser.add_argument('images_y_start', type=int)
parser.add_argument('images_y_end', type=int)
parser.add_argument('zoom_level', type=int)
parser.add_argument('--outputPath', default="./")
args = parser.parse_args()
TILE_SIZE = 256
INPUT_URL = "http://cyberjapandata.gsi.go.jp/xyz/dem/"
OUTPUT_PATH = os.path.join(os.getcwd(),args.outputPath)
if not os.path.isdir(OUTPUT_PATH):
os.makedirs(OUTPUT_PATH)
FILE_NAME = "CSMAP_%d-%d_%d-%d_%d.png" % (args.images_x_start, args.images_x_end, args.images_y_start, args.images_y_end, args.zoom_level)
def demtofloat(n):
if n == 'e':
return 0.0
else:
return float(n)
def csmap_make(images_x_start, images_x_end, images_y_start, images_y_end, zoom_level):
size_x = TILE_SIZE * (images_x_end - images_x_start + 1)
size_y = TILE_SIZE * (images_y_end - images_y_start + 1)
cs_img = Image.new('RGBA', (size_x, size_y), (0, 0, 0, 0))
for i in range(images_x_start, images_x_end + 1):
for j in range(images_y_start, images_y_end + 1):
input_image_url = INPUT_URL + str(zoom_level) + '/' + str(i) + '/' + str(j) + '.txt'
print 'input : ' + input_image_url
res = requests.get(input_image_url, stream=True)
if (res.status_code == 200):
file = res.text
file = file.split()
dem_tmp = []
for item in file:
dem_tmp.append(map(demtofloat, item.split(',')))
dem = dem_tmp
unit = 10 * math.pow(2, 14 - min(zoom_level, 14))
cs_map = CSMap(dem, unit, image_size=[TILE_SIZE, TILE_SIZE])
input_img_p = cs_map.cs_draw()
print("Get tile : %d - %d - %d" % (zoom_level, i, j))
else:
input_img_p = Image.new('RGB', (TILE_SIZE, TILE_SIZE), (0, 0, 0))
print("Can't get tile : %d - %d - %d" % (zoom_level, i, j))
cs_img.paste(input_img_p, ((i - images_x_start) * TILE_SIZE, (j - images_y_start) * TILE_SIZE))
return cs_img
cs_img = csmap_make(args.images_x_start, args.images_x_end, args.images_y_start, args.images_y_end, args.zoom_level)
cs_img.save(os.path.join(OUTPUT_PATH, FILE_NAME))
print "Image output : " + os.path.join(OUTPUT_PATH, FILE_NAME)
|
mit
|
vmax-feihu/hue
|
desktop/core/ext-py/lxml-3.3.6/src/lxml/tests/test_relaxng.py
|
16
|
5736
|
# -*- coding: utf-8 -*-
"""
Test cases related to RelaxNG parsing and validation
"""
import unittest, sys, os.path
this_dir = os.path.dirname(__file__)
if this_dir not in sys.path:
sys.path.insert(0, this_dir) # needed for Py3
from common_imports import etree, BytesIO, _bytes, HelperTestCase, fileInTestDir
from common_imports import doctest, make_doctest
class ETreeRelaxNGTestCase(HelperTestCase):
def test_relaxng(self):
tree_valid = self.parse('<a><b></b></a>')
tree_invalid = self.parse('<a><c></c></a>')
schema = self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b">
<text />
</element>
</zeroOrMore>
</element>
''')
schema = etree.RelaxNG(schema)
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.error_log.filter_from_errors())
self.assertFalse(schema.validate(tree_invalid))
self.assertTrue(schema.error_log.filter_from_errors())
self.assertTrue(schema.validate(tree_valid)) # repeat valid
self.assertFalse(schema.error_log.filter_from_errors()) # repeat valid
def test_relaxng_stringio(self):
tree_valid = self.parse('<a><b></b></a>')
tree_invalid = self.parse('<a><c></c></a>')
schema_file = BytesIO('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b">
<text />
</element>
</zeroOrMore>
</element>
''')
schema = etree.RelaxNG(file=schema_file)
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.validate(tree_invalid))
def test_relaxng_elementtree_error(self):
self.assertRaises(ValueError, etree.RelaxNG, etree.ElementTree())
def test_relaxng_error(self):
tree_invalid = self.parse('<a><c></c></a>')
schema = self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b">
<text />
</element>
</zeroOrMore>
</element>
''')
schema = etree.RelaxNG(schema)
self.assertFalse(schema.validate(tree_invalid))
errors = schema.error_log
self.assertTrue([log for log in errors
if log.level_name == "ERROR"])
self.assertTrue([log for log in errors
if "not expect" in log.message])
def test_relaxng_invalid_schema(self):
schema = self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b" />
</zeroOrMore>
</element>
''')
self.assertRaises(etree.RelaxNGParseError,
etree.RelaxNG, schema)
def test_relaxng_invalid_schema2(self):
schema = self.parse('''\
<grammar xmlns="http://relaxng.org/ns/structure/1.0" />
''')
self.assertRaises(etree.RelaxNGParseError,
etree.RelaxNG, schema)
def test_relaxng_invalid_schema3(self):
schema = self.parse('''\
<grammar xmlns="http://relaxng.org/ns/structure/1.0">
<define name="test">
<element name="test"/>
</define>
</grammar>
''')
self.assertRaises(etree.RelaxNGParseError,
etree.RelaxNG, schema)
def test_relaxng_invalid_schema4(self):
# segfault
schema = self.parse('''\
<element name="a" xmlns="mynamespace" />
''')
self.assertRaises(etree.RelaxNGParseError,
etree.RelaxNG, schema)
def test_relaxng_include(self):
# this will only work if we access the file through path or
# file object..
f = open(fileInTestDir('test1.rng'), 'rb')
try:
schema = etree.RelaxNG(file=f)
finally:
f.close()
def test_relaxng_shortcut(self):
tree_valid = self.parse('<a><b></b></a>')
tree_invalid = self.parse('<a><c></c></a>')
schema = self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b">
<text />
</element>
</zeroOrMore>
</element>
''')
self.assertTrue(tree_valid.relaxng(schema))
self.assertFalse(tree_invalid.relaxng(schema))
def test_multiple_elementrees(self):
tree = self.parse('<a><b>B</b><c>C</c></a>')
schema = etree.RelaxNG( self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<element name="b">
<text />
</element>
<element name="c">
<text />
</element>
</element>
''') )
self.assertTrue(schema.validate(tree))
self.assertFalse(schema.error_log.filter_from_errors())
self.assertTrue(schema.validate(tree)) # repeat valid
self.assertFalse(schema.error_log.filter_from_errors()) # repeat valid
schema = etree.RelaxNG( self.parse('''\
<element name="b" xmlns="http://relaxng.org/ns/structure/1.0">
<text />
</element>
''') )
c_tree = etree.ElementTree(tree.getroot()[1])
self.assertEqual(self._rootstring(c_tree), _bytes('<c>C</c>'))
self.assertFalse(schema.validate(c_tree))
self.assertTrue(schema.error_log.filter_from_errors())
b_tree = etree.ElementTree(tree.getroot()[0])
self.assertEqual(self._rootstring(b_tree), _bytes('<b>B</b>'))
self.assertTrue(schema.validate(b_tree))
self.assertFalse(schema.error_log.filter_from_errors())
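# Illustrative sketch (not part of the test suite): standalone RelaxNG
# validation with lxml.etree, without the HelperTestCase machinery used above.
def _example_standalone_validation():
    schema_doc = etree.XML(
        '<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">'
        '<zeroOrMore><element name="b"><text /></element></zeroOrMore>'
        '</element>')
    relaxng = etree.RelaxNG(schema_doc)
    return relaxng.validate(etree.XML('<a><b/></a>'))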
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(ETreeRelaxNGTestCase)])
suite.addTests(
[make_doctest('../../../doc/validation.txt')])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
apache-2.0
|
Inspq/ansible
|
lib/ansible/modules/notification/osx_say.py
|
70
|
2306
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: osx_say
version_added: "1.2"
short_description: Makes an OS X computer speak.
description:
- Makes an OS X computer speak! Amuse your friends, annoy your coworkers!
notes:
- If you like this module, you may also be interested in the osx_say callback in the plugins/ directory of the source checkout.
options:
msg:
description:
What to say
required: true
voice:
description:
What voice to use
required: false
requirements: [ say ]
author:
- "Ansible Core Team"
- "Michael DeHaan (@mpdehaan)"
'''
EXAMPLES = '''
- osx_say:
msg: '{{ inventory_hostname }} is all done'
voice: Zarvox
delegate_to: localhost
'''
DEFAULT_VOICE='Alex'
def say(module, msg, voice):
module.run_command(["/usr/bin/say", msg, "--voice=%s" % (voice)], check_rc=True)
def main():
module = AnsibleModule(
argument_spec=dict(
msg=dict(required=True),
voice=dict(required=False, default=DEFAULT_VOICE),
),
supports_check_mode=False
)
if not os.path.exists("/usr/bin/say"):
module.fail_json(msg="/usr/bin/say is not installed")
msg = module.params['msg']
voice = module.params['voice']
say(module, msg, voice)
module.exit_json(msg=msg, changed=False)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
chirilo/kuma
|
vendor/packages/translate/convert/xliff2odf.py
|
23
|
6604
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2004-2014 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Convert XLIFF translation files to OpenDocument (ODF) files.
See: http://docs.translatehouse.org/projects/translate-toolkit/en/latest/commands/odf2xliff.html
for examples and usage instructions.
"""
import zipfile
from cStringIO import StringIO
import lxml.etree as etree
from translate.convert import convert
from translate.storage import factory
from translate.storage.odf_io import copy_odf, open_odf
from translate.storage.odf_shared import (inline_elements,
no_translate_content_elements)
from translate.storage.xml_extract.extract import ParseState
from translate.storage.xml_extract.generate import (apply_translations,
replace_dom_text)
from translate.storage.xml_extract.unit_tree import XPathTree, build_unit_tree
def translate_odf(template, input_file):
def load_dom_trees(template):
"""Return a dict with translatable files in the template ODF package.
The keys are the filenames inside the ODF package, and the values are
the etrees for each of those translatable files.
"""
odf_data = open_odf(template)
return dict((filename, etree.parse(StringIO(data)))
for filename, data in odf_data.iteritems())
def load_unit_tree(input_file):
"""Return a dict with the translations grouped by files ODF package.
The keys are the filenames inside the template ODF package, and the
values are XPathTree instances for each of those files.
"""
store = factory.getobject(input_file)
tree = build_unit_tree(store)
def extract_unit_tree(filename, root_dom_element_name):
"""Find the subtree in 'tree' which corresponds to the data in XML
file 'filename'.
"""
try:
file_tree = tree.children[root_dom_element_name, 0]
except KeyError:
file_tree = XPathTree()
return (filename, file_tree)
return dict([extract_unit_tree('content.xml', 'office:document-content'),
extract_unit_tree('meta.xml', 'office:document-meta'),
extract_unit_tree('styles.xml', 'office:document-styles')])
def translate_dom_trees(unit_trees, dom_trees):
"""Return a dict with the translated files for the ODF package.
The keys are the filenames for the translatable files inside the
template ODF package, and the values are etree ElementTree instances
for each of those files.
"""
make_parse_state = lambda: ParseState(no_translate_content_elements,
inline_elements)
for filename, dom_tree in dom_trees.iteritems():
file_unit_tree = unit_trees[filename]
apply_translations(dom_tree.getroot(), file_unit_tree,
replace_dom_text(make_parse_state))
return dom_trees
dom_trees = load_dom_trees(template)
unit_trees = load_unit_tree(input_file)
return translate_dom_trees(unit_trees, dom_trees)
def write_odf(template, output_file, dom_trees):
"""Write the translated ODF package.
The resulting ODF package is a copy of the template ODF package, with the
translatable files replaced by their translated versions.
"""
template_zip = zipfile.ZipFile(template, 'r')
output_zip = zipfile.ZipFile(output_file, 'w',
compression=zipfile.ZIP_DEFLATED)
# Copy the ODF package.
output_zip = copy_odf(template_zip, output_zip, dom_trees.keys())
# Write the translated files into the ODF package.
for filename, dom_tree in dom_trees.iteritems():
output_zip.writestr(filename, etree.tostring(dom_tree,
encoding='UTF-8',
xml_declaration=True))
def convertxliff(input_file, output_file, template):
"""Create a translated ODF using an ODF template and a XLIFF file."""
# Since the convertoptionsparser will give us an open file, we risk that
# it could have been opened in non-binary mode on Windows, and then we'll
# have problems, so let's make sure we have what we want.
template.close()
template = file(template.name, mode='rb')
output_file.close()
output_file = file(output_file.name, mode='wb')
xlf_data = input_file.read()
dom_trees = translate_odf(template, StringIO(xlf_data))
write_odf(template, output_file, dom_trees)
output_file.close()
return True
def main(argv=None):
formats = {
('xlf', 'odt'): ("odt", convertxliff), # Text
('xlf', 'ods'): ("ods", convertxliff), # Spreadsheet
('xlf', 'odp'): ("odp", convertxliff), # Presentation
('xlf', 'odg'): ("odg", convertxliff), # Drawing
('xlf', 'odc'): ("odc", convertxliff), # Chart
('xlf', 'odf'): ("odf", convertxliff), # Formula
('xlf', 'odi'): ("odi", convertxliff), # Image
('xlf', 'odm'): ("odm", convertxliff), # Master Document
('xlf', 'ott'): ("ott", convertxliff), # Text template
('xlf', 'ots'): ("ots", convertxliff), # Spreadsheet template
('xlf', 'otp'): ("otp", convertxliff), # Presentation template
('xlf', 'otg'): ("otg", convertxliff), # Drawing template
('xlf', 'otc'): ("otc", convertxliff), # Chart template
('xlf', 'otf'): ("otf", convertxliff), # Formula template
('xlf', 'oti'): ("oti", convertxliff), # Image template
('xlf', 'oth'): ("oth", convertxliff), # Web page template
}
parser = convert.ConvertOptionParser(formats, usetemplates=True, description=__doc__)
parser.run(argv)
if __name__ == '__main__':
main()
|
mpl-2.0
|
helpmoeny/pythoncode
|
Python_labs/lab10/warmup.py
|
1
|
1907
|
################################################################################
## Demonstration program for class Date
################################################################################
import date
print("[Testing Area-__init__]")
J = date.Date( brake, me, 2013 )#testing two arguments to the function that are erroneous input
print( J )
print( J.to_iso() )
print( J.to_mdy() )
print( J.is_valid() )
print()
print("[Testing Area-spaces]")
F = date.Date()
F.from_mdy( "Oct ober 3 1, 199 4" )#testing with (random)spaces in function call
print( F )
print( F.to_iso() )
print( F.to_mdy() )
print( F.is_valid() )
print()
G = date.Date()
G.from_iso( "19 94- 10 - 3 1" )#testing with (random) spaces in function call
print( G )
print( G.to_iso() )
print( G.to_mdy() )
print( G.is_valid() )
print()
print("[Testing Area-erroneous input]")
H = date.Date()
H.from_mdy( "brake me" )#testing with erroneous input
print( H )
print( H.to_iso() )
print( H.to_mdy() )
print( H.is_valid() )
print()
I = date.Date()
I.from_iso( "brake me" )#testing with erroneous input
print( I )
print( I.to_iso() )
print( I.to_mdy() )
print( I.is_valid() )
print()
# With all these examples it is pretty obvious that the arguments must match exactly what the code expects,
# i.e. the exact format, with no stray spaces or other malformed input.
A = date.Date( 1, 1, 2013 )
print( A )
print( A.to_iso() )
print( A.to_mdy() )
print( A.is_valid() )
print()
B = date.Date( 12, 31, 2013 )
print( B )
print( B.to_iso() )
print( B.to_mdy() )
print( B.is_valid() )
print()
C = date.Date()
C.from_iso( "2013-11-28" )
print( C )
print( C.to_iso() )
print( C.to_mdy() )
print( C.is_valid() )
print()
D = date.Date()
D.from_mdy( "March 15, 2015" )
print( D )
print( D.to_iso() )
print( D.to_mdy() )
print( D.is_valid() )
print()
E = date.Date()
print( E )
print( E.to_iso() )
print( E.to_mdy() )
print( E.is_valid() )
print()
|
unlicense
|
bzero/networkx
|
networkx/algorithms/components/strongly_connected.py
|
30
|
11937
|
# -*- coding: utf-8 -*-
"""Strongly connected components.
"""
# Copyright (C) 2004-2015 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
import networkx as nx
from networkx.utils.decorators import not_implemented_for
__authors__ = "\n".join(['Eben Kenah',
'Aric Hagberg ([email protected])',
'Christopher Ellison',
'Ben Edwards ([email protected])'])
__all__ = ['number_strongly_connected_components',
'strongly_connected_components',
'strongly_connected_component_subgraphs',
'is_strongly_connected',
'strongly_connected_components_recursive',
'kosaraju_strongly_connected_components',
'condensation']
@not_implemented_for('undirected')
def strongly_connected_components(G):
"""Generate nodes in strongly connected components of graph.
Parameters
----------
G : NetworkX Graph
A directed graph.
Returns
-------
comp : generator of sets
A generator of sets of nodes, one for each strongly connected
component of G.
Raises
------
NetworkXNotImplemented:
If G is undirected.
Examples
--------
Generate a sorted list of strongly connected components, largest first.
>>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
>>> G.add_cycle([10, 11, 12])
>>> [len(c) for c in sorted(nx.strongly_connected_components(G),
... key=len, reverse=True)]
[4, 3]
If you only want the largest component, it's more efficient to
use max instead of sort.
>>> largest = max(nx.strongly_connected_components(G), key=len)
See Also
--------
connected_components,
weakly_connected_components
Notes
-----
Uses Tarjan's algorithm with Nuutila's modifications.
Nonrecursive version of algorithm.
References
----------
.. [1] Depth-first search and linear graph algorithms, R. Tarjan
SIAM Journal of Computing 1(2):146-160, (1972).
.. [2] On finding the strongly connected components in a directed graph.
E. Nuutila and E. Soisalon-Soinen
Information Processing Letters 49(1): 9-14, (1994)..
"""
preorder = {}
lowlink = {}
scc_found = {}
scc_queue = []
i = 0 # Preorder counter
for source in G:
if source not in scc_found:
queue = [source]
while queue:
v = queue[-1]
if v not in preorder:
i = i + 1
preorder[v] = i
done = 1
v_nbrs = G[v]
for w in v_nbrs:
if w not in preorder:
queue.append(w)
done = 0
break
if done == 1:
lowlink[v] = preorder[v]
for w in v_nbrs:
if w not in scc_found:
if preorder[w] > preorder[v]:
lowlink[v] = min([lowlink[v], lowlink[w]])
else:
lowlink[v] = min([lowlink[v], preorder[w]])
queue.pop()
if lowlink[v] == preorder[v]:
scc_found[v] = True
scc = {v}
while scc_queue and preorder[scc_queue[-1]] > preorder[v]:
k = scc_queue.pop()
scc_found[k] = True
scc.add(k)
yield scc
else:
scc_queue.append(v)
@not_implemented_for('undirected')
def kosaraju_strongly_connected_components(G, source=None):
"""Generate nodes in strongly connected components of graph.
Parameters
----------
G : NetworkX Graph
A directed graph.
Returns
-------
comp : generator of sets
A generator of sets of nodes, one for each strongly connected
component of G.
Raises
------
NetworkXNotImplemented:
If G is undirected.
Examples
--------
Generate a sorted list of strongly connected components, largest first.
>>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
>>> G.add_cycle([10, 11, 12])
>>> [len(c) for c in sorted(nx.kosaraju_strongly_connected_components(G),
... key=len, reverse=True)]
[4, 3]
If you only want the largest component, it's more efficient to
use max instead of sort.
>>> largest = max(nx.kosaraju_strongly_connected_components(G), key=len)
See Also
--------
connected_components
weakly_connected_components
Notes
-----
Uses Kosaraju's algorithm.
"""
with nx.utils.reversed(G):
post = list(nx.dfs_postorder_nodes(G, source=source))
seen = set()
while post:
r = post.pop()
if r in seen:
continue
c = nx.dfs_preorder_nodes(G, r)
new = {v for v in c if v not in seen}
yield new
seen.update(new)
@not_implemented_for('undirected')
def strongly_connected_components_recursive(G):
"""Generate nodes in strongly connected components of graph.
Recursive version of algorithm.
Parameters
----------
G : NetworkX Graph
       A directed graph.
Returns
-------
comp : generator of sets
A generator of sets of nodes, one for each strongly connected
component of G.
Raises
------
NetworkXNotImplemented:
        If G is undirected.
Examples
--------
Generate a sorted list of strongly connected components, largest first.
>>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
>>> G.add_cycle([10, 11, 12])
>>> [len(c) for c in sorted(nx.strongly_connected_components_recursive(G),
... key=len, reverse=True)]
[4, 3]
If you only want the largest component, it's more efficient to
use max instead of sort.
>>> largest = max(nx.strongly_connected_components_recursive(G), key=len)
See Also
--------
connected_components
Notes
-----
Uses Tarjan's algorithm with Nuutila's modifications.
References
----------
    .. [1] Depth-first search and linear graph algorithms, R. Tarjan
       SIAM Journal on Computing 1(2):146-160, (1972).
    .. [2] On finding the strongly connected components in a directed graph.
       E. Nuutila and E. Soisalon-Soininen
       Information Processing Letters 49(1): 9-14, (1994).
"""
def visit(v, cnt):
root[v] = cnt
visited[v] = cnt
cnt += 1
stack.append(v)
for w in G[v]:
if w not in visited:
for c in visit(w, cnt):
yield c
if w not in component:
root[v] = min(root[v], root[w])
if root[v] == visited[v]:
component[v] = root[v]
tmpc = {v} # hold nodes in this component
while stack[-1] != v:
w = stack.pop()
component[w] = root[v]
tmpc.add(w)
stack.remove(v)
yield tmpc
visited = {}
component = {}
root = {}
cnt = 0
stack = []
for source in G:
if source not in visited:
for c in visit(source, cnt):
yield c
@not_implemented_for('undirected')
def strongly_connected_component_subgraphs(G, copy=True):
"""Generate strongly connected components as subgraphs.
Parameters
----------
G : NetworkX Graph
A directed graph.
copy : boolean, optional
        If copy is True, graph, node, and edge attributes are copied to
the subgraphs.
Returns
-------
comp : generator of graphs
A generator of graphs, one for each strongly connected component of G.
Examples
--------
Generate a sorted list of strongly connected components, largest first.
>>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
>>> G.add_cycle([10, 11, 12])
>>> [len(Gc) for Gc in sorted(nx.strongly_connected_component_subgraphs(G),
... key=len, reverse=True)]
[4, 3]
If you only want the largest component, it's more efficient to
use max instead of sort.
>>> Gc = max(nx.strongly_connected_component_subgraphs(G), key=len)
See Also
--------
connected_component_subgraphs
weakly_connected_component_subgraphs
"""
for comp in strongly_connected_components(G):
if copy:
yield G.subgraph(comp).copy()
else:
yield G.subgraph(comp)
@not_implemented_for('undirected')
def number_strongly_connected_components(G):
"""Return number of strongly connected components in graph.
Parameters
----------
G : NetworkX graph
A directed graph.
Returns
-------
n : integer
Number of strongly connected components
See Also
--------
connected_components
Notes
-----
For directed graphs only.
"""
return len(list(strongly_connected_components(G)))
@not_implemented_for('undirected')
def is_strongly_connected(G):
"""Test directed graph for strong connectivity.
Parameters
----------
G : NetworkX Graph
A directed graph.
Returns
-------
connected : bool
True if the graph is strongly connected, False otherwise.
See Also
--------
strongly_connected_components
Notes
-----
For directed graphs only.
"""
if len(G) == 0:
raise nx.NetworkXPointlessConcept(
"""Connectivity is undefined for the null graph.""")
return len(list(strongly_connected_components(G))[0]) == len(G)
@not_implemented_for('undirected')
def condensation(G, scc=None):
"""Returns the condensation of G.
The condensation of G is the graph with each of the strongly connected
components contracted into a single node.
Parameters
----------
G : NetworkX DiGraph
A directed graph.
scc: list or generator (optional, default=None)
Strongly connected components. If provided, the elements in
`scc` must partition the nodes in `G`. If not provided, it will be
calculated as scc=nx.strongly_connected_components(G).
Returns
-------
C : NetworkX DiGraph
The condensation graph C of G. The node labels are integers
corresponding to the index of the component in the list of
strongly connected components of G. C has a graph attribute named
'mapping' with a dictionary mapping the original nodes to the
nodes in C to which they belong. Each node in C also has a node
attribute 'members' with the set of original nodes in G that
form the SCC that the node in C represents.
Raises
------
NetworkXNotImplemented:
        If G is not directed.
Notes
-----
After contracting all strongly connected components to a single node,
the resulting graph is a directed acyclic graph.
"""
if scc is None:
scc = nx.strongly_connected_components(G)
mapping = {}
members = {}
C = nx.DiGraph()
for i, component in enumerate(scc):
members[i] = component
mapping.update((n, i) for n in component)
number_of_components = i + 1
C.add_nodes_from(range(number_of_components))
C.add_edges_from((mapping[u], mapping[v]) for u, v in G.edges_iter()
if mapping[u] != mapping[v])
# Add a list of members (ie original nodes) to each node (ie scc) in C.
nx.set_node_attributes(C, 'members', members)
# Add mapping dict as graph attribute
C.graph['mapping'] = mapping
return C
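# A minimal usage sketch for condensation (illustration only; it assumes a
# networkx installation so that nx.condensation resolves to the function
# above):
if __name__ == '__main__':
    import networkx as nx
    # Two 2-cycles joined by one edge: each cycle collapses to a single
    # condensed node and the connecting edge survives.
    G = nx.DiGraph([(1, 2), (2, 1), (2, 3), (3, 4), (4, 3)])
    C = nx.condensation(G)
    print(C.edges())            # one edge between the two condensed nodes
    print(C.graph['mapping'])   # original node -> condensed node index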
|
bsd-3-clause
|
Pluto-tv/chromium-crosswalk
|
tools/vim/tests/chromium.ycm_extra_conf_unittest.py
|
7
|
10297
|
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for chromium.ycm_extra_conf.
These tests should be getting picked up by the PRESUBMIT.py in /tools/vim.
Currently the tests only run on Linux and require 'ninja' to be available on
PATH. Due to these requirements, the tests should only be run on upload.
"""
import imp
import os
import shutil
import stat
import string
import subprocess
import sys
import tempfile
import unittest
def CreateFile(path,
copy_from = None,
format_with = None,
make_executable = False):
"""Creates a file.
If a file already exists at |path|, it will be overwritten.
Args:
path: (String) Absolute path for file to be created.
copy_from: (String or None) Absolute path to source file. If valid, the
contents of this file will be written to |path|.
format_with: (Dictionary or None) Only valid if |copy_from| is also valid.
The contents of the file at |copy_from| will be passed through
string.Formatter.vformat() with this parameter as the dictionary.
    make_executable: (Boolean) If true, |path| will be made executable.
"""
if not os.path.isabs(path):
raise Exception(
'Argument |path| needs to be an absolute path. Got: "{}"'.format(path))
with open(path, 'w') as f:
if copy_from:
with open(copy_from, 'r') as source:
contents = source.read()
if format_with:
formatter = string.Formatter()
contents = formatter.vformat(contents, None, format_with)
f.write(contents)
if make_executable:
statinfo = os.stat(path)
os.chmod(path, statinfo.st_mode | stat.S_IXUSR)
@unittest.skipIf(sys.platform.startswith('linux'),
'Tests are only valid on Linux.')
class Chromium_ycmExtraConfTest_NotOnLinux(unittest.TestCase):
def testAlwaysFailsIfNotRunningOnLinux(self):
self.fail('Changes to chromium.ycm_extra_conf.py currently need to be ' \
'uploaded from Linux since the tests only run on Linux.')
@unittest.skipUnless(sys.platform.startswith('linux'),
'Tests are only valid on Linux.')
class Chromium_ycmExtraConfTest(unittest.TestCase):
def SetUpFakeChromeTreeBelowPath(self):
"""Create fake Chromium source tree under self.test_root.
The fake source tree has the following contents:
<self.test_root>
| .gclient
|
+-- src
| | DEPS
| | three.cc
| |
| +-- .git
|
+-- out
|
+-- Debug
build.ninja
"""
self.chrome_root = os.path.abspath(os.path.normpath(
os.path.join(self.test_root, 'src')))
self.out_dir = os.path.join(self.chrome_root, 'out', 'Debug')
os.makedirs(self.chrome_root)
os.makedirs(os.path.join(self.chrome_root, '.git'))
os.makedirs(self.out_dir)
CreateFile(os.path.join(self.test_root, '.gclient'))
CreateFile(os.path.join(self.chrome_root, 'DEPS'))
CreateFile(os.path.join(self.chrome_root, 'three.cc'))
    # Fake ninja build file. Applications of the 'cxx' rule are tagged by which
# source file was used as input so that the test can verify that the correct
# build dependency was used.
CreateFile(os.path.join(self.out_dir, 'build.ninja'),
copy_from=os.path.join(self.test_data_path,
'fake_build_ninja.txt'))
def NormalizeString(self, string):
return string.replace(self.out_dir, '[OUT]').\
replace(self.chrome_root, '[SRC]')
def NormalizeStringsInList(self, list_of_strings):
return [self.NormalizeString(s) for s in list_of_strings]
def setUp(self):
self.actual_chrome_root = os.path.normpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../..'))
sys.path.append(os.path.join(self.actual_chrome_root, 'tools', 'vim'))
self.test_data_path = os.path.join(self.actual_chrome_root, 'tools', 'vim',
'tests', 'data')
self.ycm_extra_conf = imp.load_source('ycm_extra_conf',
'chromium.ycm_extra_conf.py')
self.test_root = tempfile.mkdtemp()
self.SetUpFakeChromeTreeBelowPath()
def tearDown(self):
if self.test_root:
shutil.rmtree(self.test_root)
def testNinjaIsAvailable(self):
p = subprocess.Popen(['ninja', '--version'], stdout=subprocess.PIPE)
_, _ = p.communicate()
self.assertFalse(p.returncode)
def testFindChromeSrc(self):
chrome_source = self.ycm_extra_conf.FindChromeSrcFromFilename(
os.path.join(self.chrome_root, 'chrome', 'one.cpp'))
self.assertEquals(chrome_source, self.chrome_root)
chrome_source = self.ycm_extra_conf.FindChromeSrcFromFilename(
os.path.join(self.chrome_root, 'one.cpp'))
self.assertEquals(chrome_source, self.chrome_root)
def testCommandLineForKnownCppFile(self):
command_line = self.ycm_extra_conf.GetClangCommandLineFromNinjaForSource(
self.out_dir, os.path.join(self.chrome_root, 'one.cpp'))
self.assertEquals(
command_line, ('../../fake-clang++ -Ia -isysroot /mac.sdk -Itag-one '
'../../one.cpp -o obj/one.o'))
def testCommandLineForUnknownCppFile(self):
command_line = self.ycm_extra_conf.GetClangCommandLineFromNinjaForSource(
self.out_dir, os.path.join(self.chrome_root, 'unknown.cpp'))
self.assertEquals(command_line, None)
def testGetClangOptionsForKnownCppFile(self):
clang_options = \
self.ycm_extra_conf.GetClangOptionsFromNinjaForFilename(
self.chrome_root, os.path.join(self.chrome_root, 'one.cpp'))
self.assertEquals(self.NormalizeStringsInList(clang_options), [
'-I[SRC]',
'-Wno-unknown-warning-option',
'-I[OUT]/a',
'-isysroot',
'/mac.sdk',
'-I[OUT]/tag-one'
])
def testGetFlagsForFileForKnownCppFile(self):
result = self.ycm_extra_conf.FlagsForFile(
os.path.join(self.chrome_root, 'one.cpp'))
self.assertTrue(result)
self.assertTrue('do_cache' in result)
self.assertTrue(result['do_cache'])
self.assertTrue('flags' in result)
self.assertEquals(self.NormalizeStringsInList(result['flags']), [
'-DUSE_CLANG_COMPLETER',
'-std=c++11',
'-x', 'c++',
'-I[SRC]',
'-Wno-unknown-warning-option',
'-I[OUT]/a',
'-isysroot',
'/mac.sdk',
'-I[OUT]/tag-one'
])
def testGetFlagsForFileForUnknownCppFile(self):
result = self.ycm_extra_conf.FlagsForFile(
os.path.join(self.chrome_root, 'nonexistent.cpp'))
self.assertTrue(result)
self.assertTrue('do_cache' in result)
self.assertTrue(result['do_cache'])
self.assertTrue('flags' in result)
self.assertEquals(self.NormalizeStringsInList(result['flags']), [
'-DUSE_CLANG_COMPLETER',
'-std=c++11',
'-x', 'c++',
'-I[SRC]',
'-Wno-unknown-warning-option',
'-I[OUT]/a',
'-isysroot',
'/mac.sdk',
'-I[OUT]/tag-default'
])
def testGetFlagsForFileForUnknownHeaderFile(self):
result = self.ycm_extra_conf.FlagsForFile(
os.path.join(self.chrome_root, 'nonexistent.h'))
self.assertTrue(result)
self.assertTrue('do_cache' in result)
self.assertTrue(result['do_cache'])
self.assertTrue('flags' in result)
self.assertEquals(self.NormalizeStringsInList(result['flags']), [
'-DUSE_CLANG_COMPLETER',
'-std=c++11',
'-x', 'c++',
'-I[SRC]',
'-Wno-unknown-warning-option',
'-I[OUT]/a',
'-isysroot',
'/mac.sdk',
'-I[OUT]/tag-default'
])
def testGetFlagsForFileForKnownHeaderFileWithAssociatedCppFile(self):
result = self.ycm_extra_conf.FlagsForFile(
os.path.join(self.chrome_root, 'three.h'))
self.assertTrue(result)
self.assertTrue('do_cache' in result)
self.assertTrue(result['do_cache'])
self.assertTrue('flags' in result)
self.assertEquals(self.NormalizeStringsInList(result['flags']), [
'-DUSE_CLANG_COMPLETER',
'-std=c++11',
'-x', 'c++',
'-I[SRC]',
'-Wno-unknown-warning-option',
'-I[OUT]/a',
'-isysroot',
'/mac.sdk',
'-I[OUT]/tag-three'
])
def testSourceFileWithNonClangOutputs(self):
# Verify assumption that four.cc has non-compiler-output listed as the first
# output.
p = subprocess.Popen(['ninja', '-C', self.out_dir, '-t',
'query', '../../four.cc'],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, _ = p.communicate()
self.assertFalse(p.returncode)
self.assertEquals(stdout,
'../../four.cc:\n'
' outputs:\n'
' obj/linker-output.o\n'
' obj/four.o\n')
result = self.ycm_extra_conf.FlagsForFile(
os.path.join(self.chrome_root, 'four.cc'))
self.assertTrue(result)
self.assertTrue('do_cache' in result)
self.assertTrue(result['do_cache'])
self.assertTrue('flags' in result)
self.assertEquals(self.NormalizeStringsInList(result['flags']), [
'-DUSE_CLANG_COMPLETER',
'-std=c++11',
'-x', 'c++',
'-I[SRC]',
'-Wno-unknown-warning-option',
'-I[OUT]/a',
'-isysroot',
'/mac.sdk',
'-I[OUT]/tag-four'
])
def testSourceFileWithOnlyNonClangOutputs(self):
result = self.ycm_extra_conf.FlagsForFile(
os.path.join(self.chrome_root, 'five.cc'))
self.assertTrue(result)
self.assertTrue('do_cache' in result)
self.assertTrue(result['do_cache'])
self.assertTrue('flags' in result)
self.assertEquals(self.NormalizeStringsInList(result['flags']), [
'-DUSE_CLANG_COMPLETER',
'-std=c++11',
'-x', 'c++',
'-I[SRC]',
'-Wno-unknown-warning-option',
'-I[OUT]/a',
'-isysroot',
'/mac.sdk',
'-I[OUT]/tag-default'
])
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
nibrahim/PlasTeX
|
plasTeX/ConfigManager/String.py
|
8
|
1512
|
#!/usr/bin/env python
from UserString import UserString
from Generic import GenericOption, DEFAULTS, GenericParser, GenericArgument
class StringParser(GenericParser): pass
class StringOption(StringParser, GenericOption, UserString):
""" String configuration option """
synopsis = ''
def __init__(self, docstring=DEFAULTS['docstring'],
options=DEFAULTS['options'],
default=DEFAULTS['default'],
optional=DEFAULTS['optional'],
values=DEFAULTS['values'],
category=DEFAULTS['category'],
callback=DEFAULTS['callback'],
synopsis=DEFAULTS['synopsis'],
environ=DEFAULTS['environ'],
registry=DEFAULTS['registry'],
mandatory=None,
name=DEFAULTS['name'],
source=DEFAULTS['source']):
UserString.__init__(self, '')
GenericOption.initialize(self, locals())
def cast(self, arg):
if arg is None: return
return unicode(arg)
def __iadd__(self, other):
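        # Repeated occurrences of the option accumulate: the optional
        # callback may rewrite (or veto) the incoming value, the first value
        # is stored as-is, and later values are appended on new lines.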
if callable(self.callback):
other = self.callback(self.cast(other))
if other is None:
return self
if self.data is None:
self.data = self.cast(other)
else:
self.data += '\n%s' % self.cast(other)
return self
class StringArgument(GenericArgument, StringOption):
""" String command-line option """
|
mit
|
jean/sentry
|
src/sentry/south_migrations/0131_auto__add_organizationmember__add_unique_organizationmember_organizati.py
|
2
|
42604
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'OrganizationMember'
db.create_table(
'sentry_organizationmember', (
('id', self.gf('sentry.db.models.fields.BoundedBigAutoField')(primary_key=True)), (
'organization', self.gf('sentry.db.models.fields.FlexibleForeignKey')(
related_name='member_set', to=orm['sentry.Organization']
)
), (
'user', self.gf('sentry.db.models.fields.FlexibleForeignKey')(
related_name='sentry_orgmember_set', to=orm['sentry.User']
)
), ('type', self.gf('django.db.models.fields.PositiveIntegerField')(default=50)), (
'date_added',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)
),
)
)
db.send_create_signal('sentry', ['OrganizationMember'])
# Adding unique constraint on 'OrganizationMember', fields ['organization', 'user']
db.create_unique('sentry_organizationmember', ['organization_id', 'user_id'])
# Adding model 'Organization'
db.create_table(
'sentry_organization', (
('id', self.gf('sentry.db.models.fields.BoundedBigAutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=64)), (
'owner',
self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.User'])
), ('status', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)), (
'date_added',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)
),
)
)
db.send_create_signal('sentry', ['Organization'])
# Adding field 'Team.organization'
db.add_column(
'sentry_team',
'organization',
self.gf('sentry.db.models.fields.FlexibleForeignKey')(
to=orm['sentry.Organization'], null=True
),
keep_default=False
)
def backwards(self, orm):
# Removing unique constraint on 'OrganizationMember', fields ['organization', 'user']
db.delete_unique('sentry_organizationmember', ['organization_id', 'user_id'])
# Deleting model 'OrganizationMember'
db.delete_table('sentry_organizationmember')
# Deleting model 'Organization'
db.delete_table('sentry_organization')
# Deleting field 'Team.organization'
db.delete_column('sentry_team', 'organization_id')
models = {
'sentry.accessgroup': {
'Meta': {
'unique_together': "(('team', 'name'),)",
'object_name': 'AccessGroup'
},
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.User']",
'symmetrical': 'False'
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Project']",
'symmetrical': 'False'
}
),
'team':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}),
'type': ('django.db.models.fields.IntegerField', [], {
'default': '50'
})
},
'sentry.activity': {
'Meta': {
'object_name': 'Activity'
},
'data': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Event']",
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.alert': {
'Meta': {
'object_name': 'Alert'
},
'data': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}),
'related_groups': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'related_alerts'",
'symmetrical': 'False',
'through': "orm['sentry.AlertRelatedGroup']",
'to': "orm['sentry.Group']"
}
),
'status': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.alertrelatedgroup': {
'Meta': {
'unique_together': "(('group', 'alert'),)",
'object_name': 'AlertRelatedGroup'
},
'alert':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Alert']"
}),
'data': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
})
},
'sentry.broadcast': {
'Meta': {
'object_name': 'Broadcast'
},
'badge': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'blank': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active':
('django.db.models.fields.BooleanField', [], {
'default': 'True',
'db_index': 'True'
}),
'link': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.CharField', [], {
'max_length': '256'
})
},
'sentry.event': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'",
'index_together': "(('group', 'datetime'),)"
},
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'db_index': 'True'
}),
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'event_set'",
'null': 'True',
'to': "orm['sentry.Group']"
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments':
('django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'null': 'True'
}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'time_spent': ('django.db.models.fields.IntegerField', [], {
'null': 'True'
})
},
'sentry.eventmapping': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'EventMapping'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
})
},
'sentry.group': {
'Meta': {
'unique_together': "(('project', 'checksum'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_public': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'blank': 'True'
}
),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "'root'",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments':
('django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'null': 'True'
}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'status': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {
'default': '0'
}),
'times_seen': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
)
},
'sentry.groupassignee': {
'Meta': {
'object_name': 'GroupAssignee',
'db_table': "'sentry_groupasignee'"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'unique': 'True',
'to': "orm['sentry.Group']"
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_assignee_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Group']"
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.grouphash': {
'Meta': {
'unique_together': "(('project', 'hash'),)",
'object_name': 'GroupHash'
},
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'hash':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'db_index': 'True'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
)
},
'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouprulestatus': {
'Meta': {
'unique_together': "(('rule', 'group'),)",
'object_name': 'GroupRuleStatus'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}),
'rule':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Rule']"
}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
})
},
'sentry.groupseen': {
'Meta': {
'unique_together': "(('user', 'group'),)",
'object_name': 'GroupSeen'
},
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'db_index': 'False'
}
)
},
'sentry.grouptagkey': {
'Meta': {
'unique_together': "(('project', 'group', 'key'),)",
'object_name': 'GroupTagKey'
},
'group':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouptagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value', 'group'),)",
'object_name': 'GroupTagValue',
'db_table': "'sentry_messagefiltervalue'"
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'to': "orm['sentry.Group']"
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'null': 'True',
'to': "orm['sentry.Project']"
}
),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.lostpasswordhash': {
'Meta': {
'object_name': 'LostPasswordHash'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'unique': 'True'
}
)
},
'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'last_updated':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {
'object_name': 'Organization'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'org_memberships'",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMember']",
'to': "orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'owner':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
'sentry.organizationmember': {
'Meta': {
'unique_together': "(('organization', 'user'),)",
'object_name': 'OrganizationMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Organization']"
}
),
'type': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '50'
}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_orgmember_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.pendingteammember': {
'Meta': {
'unique_together': "(('team', 'email'),)",
'object_name': 'PendingTeamMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'team': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'pending_member_set'",
'to': "orm['sentry.Team']"
}
),
'type': ('django.db.models.fields.IntegerField', [], {
'default': '50'
})
},
'sentry.project': {
'Meta': {
'unique_together': "(('team', 'slug'),)",
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50',
'null': 'True'
}),
'status': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']",
'null': 'True'
}
)
},
'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'roles': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
),
'user_added': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'keys_added_set'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.release': {
'Meta': {
'unique_together': "(('project', 'version'),)",
'object_name': 'Release'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.rule': {
'Meta': {
'object_name': 'Rule'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
})
},
'sentry.tagkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'TagKey',
'db_table': "'sentry_filterkey'"
},
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'label':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
'sentry.tagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'TagValue',
'db_table': "'sentry_filtervalue'"
},
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.team': {
'Meta': {
'object_name': 'Team'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'team_memberships'",
'symmetrical': 'False',
'through': "orm['sentry.TeamMember']",
'to': "orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'null': 'True'
}
),
'owner':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
})
},
'sentry.teammember': {
'Meta': {
'unique_together': "(('team', 'user'),)",
'object_name': 'TeamMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'team': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Team']"
}
),
'type': ('django.db.models.fields.IntegerField', [], {
'default': '50'
}),
'user': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'related_name': "'sentry_teammember_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'first_name':
('django.db.models.fields.CharField', [], {
'max_length': '30',
'blank': 'True'
}),
'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_name':
('django.db.models.fields.CharField', [], {
'max_length': '30',
'blank': 'True'
}),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '128'
})
},
'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'),)",
'object_name': 'UserOption'
},
'id': ('sentry.db.models.fields.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'user':
('sentry.db.models.fields.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
}
}
complete_apps = ['sentry']
|
bsd-3-clause
|
jarvys/django-1.7-jdb
|
tests/invalid_models_tests/test_relative_fields.py
|
14
|
45353
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from django.core.checks import Error
from django.db import models
from django.test.utils import override_settings
from django.test.testcases import skipIfDBFeature
from .base import IsolatedModelsTestCase
class RelativeFieldTests(IsolatedModelsTestCase):
def test_valid_foreign_key_without_accessor(self):
class Target(models.Model):
# There would be a clash if Model.field installed an accessor.
model = models.IntegerField()
class Model(models.Model):
field = models.ForeignKey(Target, related_name='+')
field = Model._meta.get_field('field')
errors = field.check()
self.assertEqual(errors, [])
def test_foreign_key_to_missing_model(self):
# Model names are resolved when a model is being created, so we cannot
# test relative fields in isolation and we need to attach them to a
# model.
class Model(models.Model):
foreign_key = models.ForeignKey('Rel1')
field = Model._meta.get_field('foreign_key')
errors = field.check()
expected = [
Error(
("Field defines a relation with model 'Rel1', "
"which is either not installed, or is abstract."),
hint=None,
obj=field,
id='fields.E300',
),
]
self.assertEqual(errors, expected)
def test_many_to_many_to_missing_model(self):
class Model(models.Model):
m2m = models.ManyToManyField("Rel2")
field = Model._meta.get_field('m2m')
errors = field.check(from_model=Model)
expected = [
Error(
("Field defines a relation with model 'Rel2', "
"which is either not installed, or is abstract."),
hint=None,
obj=field,
id='fields.E300',
),
]
self.assertEqual(errors, expected)
def test_ambiguous_relationship_model(self):
class Person(models.Model):
pass
class Group(models.Model):
field = models.ManyToManyField('Person',
through="AmbiguousRelationship", related_name='tertiary')
class AmbiguousRelationship(models.Model):
            # Too many foreign keys to Person.
first_person = models.ForeignKey(Person, related_name="first")
second_person = models.ForeignKey(Person, related_name="second")
second_model = models.ForeignKey(Group)
field = Group._meta.get_field('field')
errors = field.check(from_model=Group)
expected = [
Error(
("The model is used as an intermediate model by "
"'invalid_models_tests.Group.field', but it has more than one "
"foreign key to 'Person', which is ambiguous. You must specify "
"which foreign key Django should use via the through_fields "
"keyword argument."),
hint=('If you want to create a recursive relationship, use '
'ForeignKey("self", symmetrical=False, '
'through="AmbiguousRelationship").'),
obj=field,
id='fields.E335',
),
]
self.assertEqual(errors, expected)
def test_relationship_model_with_foreign_key_to_wrong_model(self):
class WrongModel(models.Model):
pass
class Person(models.Model):
pass
class Group(models.Model):
members = models.ManyToManyField('Person',
through="InvalidRelationship")
class InvalidRelationship(models.Model):
person = models.ForeignKey(Person)
wrong_foreign_key = models.ForeignKey(WrongModel)
            # The last foreign key should point to the Group model.
field = Group._meta.get_field('members')
errors = field.check(from_model=Group)
expected = [
Error(
("The model is used as an intermediate model by "
"'invalid_models_tests.Group.members', but it does not "
"have a foreign key to 'Group' or 'Person'."),
hint=None,
obj=InvalidRelationship,
id='fields.E336',
),
]
self.assertEqual(errors, expected)
def test_relationship_model_missing_foreign_key(self):
class Person(models.Model):
pass
class Group(models.Model):
members = models.ManyToManyField('Person',
through="InvalidRelationship")
class InvalidRelationship(models.Model):
group = models.ForeignKey(Group)
# No foreign key to Person
field = Group._meta.get_field('members')
errors = field.check(from_model=Group)
expected = [
Error(
("The model is used as an intermediate model by "
"'invalid_models_tests.Group.members', but it does not have "
"a foreign key to 'Group' or 'Person'."),
hint=None,
obj=InvalidRelationship,
id='fields.E336',
),
]
self.assertEqual(errors, expected)
def test_missing_relationship_model(self):
class Person(models.Model):
pass
class Group(models.Model):
members = models.ManyToManyField('Person',
through="MissingM2MModel")
field = Group._meta.get_field('members')
errors = field.check(from_model=Group)
expected = [
Error(
("Field specifies a many-to-many relation through model "
"'MissingM2MModel', which has not been installed."),
hint=None,
obj=field,
id='fields.E331',
),
]
self.assertEqual(errors, expected)
def test_symmetrical_self_referential_field(self):
class Person(models.Model):
# Implicit symmetrical=False.
friends = models.ManyToManyField('self', through="Relationship")
class Relationship(models.Model):
first = models.ForeignKey(Person, related_name="rel_from_set")
second = models.ForeignKey(Person, related_name="rel_to_set")
field = Person._meta.get_field('friends')
errors = field.check(from_model=Person)
expected = [
Error(
'Many-to-many fields with intermediate tables must not be symmetrical.',
hint=None,
obj=field,
id='fields.E332',
),
]
self.assertEqual(errors, expected)
def test_too_many_foreign_keys_in_self_referential_model(self):
class Person(models.Model):
friends = models.ManyToManyField('self',
through="InvalidRelationship", symmetrical=False)
class InvalidRelationship(models.Model):
first = models.ForeignKey(Person, related_name="rel_from_set_2")
second = models.ForeignKey(Person, related_name="rel_to_set_2")
third = models.ForeignKey(Person, related_name="too_many_by_far")
field = Person._meta.get_field('friends')
errors = field.check(from_model=Person)
expected = [
Error(
("The model is used as an intermediate model by "
"'invalid_models_tests.Person.friends', but it has more than two "
"foreign keys to 'Person', which is ambiguous. You must specify "
"which two foreign keys Django should use via the through_fields "
"keyword argument."),
hint='Use through_fields to specify which two foreign keys Django should use.',
obj=InvalidRelationship,
id='fields.E333',
),
]
self.assertEqual(errors, expected)
def test_symmetric_self_reference_with_intermediate_table(self):
class Person(models.Model):
# Explicit symmetrical=True.
friends = models.ManyToManyField('self',
through="Relationship", symmetrical=True)
class Relationship(models.Model):
first = models.ForeignKey(Person, related_name="rel_from_set")
second = models.ForeignKey(Person, related_name="rel_to_set")
field = Person._meta.get_field('friends')
errors = field.check(from_model=Person)
expected = [
Error(
'Many-to-many fields with intermediate tables must not be symmetrical.',
hint=None,
obj=field,
id='fields.E332',
),
]
self.assertEqual(errors, expected)
def test_symmetric_self_reference_with_intermediate_table_and_through_fields(self):
"""Using through_fields in a m2m with an intermediate model shouldn't mask its incompatibility with symmetry."""
class Person(models.Model):
# Explicit symmetrical=True.
friends = models.ManyToManyField('self',
symmetrical=True,
through="Relationship",
through_fields=('first', 'second'))
class Relationship(models.Model):
first = models.ForeignKey(Person, related_name="rel_from_set")
second = models.ForeignKey(Person, related_name="rel_to_set")
referee = models.ForeignKey(Person, related_name="referred")
field = Person._meta.get_field('friends')
errors = field.check(from_model=Person)
expected = [
Error(
'Many-to-many fields with intermediate tables must not be symmetrical.',
hint=None,
obj=field,
id='fields.E332',
),
]
self.assertEqual(errors, expected)
def test_foreign_key_to_abstract_model(self):
class Model(models.Model):
foreign_key = models.ForeignKey('AbstractModel')
class AbstractModel(models.Model):
class Meta:
abstract = True
field = Model._meta.get_field('foreign_key')
errors = field.check()
expected = [
Error(
("Field defines a relation with model 'AbstractModel', "
"which is either not installed, or is abstract."),
hint=None,
obj=field,
id='fields.E300',
),
]
self.assertEqual(errors, expected)
def test_m2m_to_abstract_model(self):
class AbstractModel(models.Model):
class Meta:
abstract = True
class Model(models.Model):
m2m = models.ManyToManyField('AbstractModel')
field = Model._meta.get_field('m2m')
errors = field.check(from_model=Model)
expected = [
Error(
("Field defines a relation with model 'AbstractModel', "
"which is either not installed, or is abstract."),
hint=None,
obj=field,
id='fields.E300',
),
]
self.assertEqual(errors, expected)
def test_unique_m2m(self):
class Person(models.Model):
name = models.CharField(max_length=5)
class Group(models.Model):
members = models.ManyToManyField('Person', unique=True)
field = Group._meta.get_field('members')
errors = field.check(from_model=Group)
expected = [
Error(
'ManyToManyFields cannot be unique.',
hint=None,
obj=field,
id='fields.E330',
),
]
self.assertEqual(errors, expected)
def test_foreign_key_to_non_unique_field(self):
class Target(models.Model):
bad = models.IntegerField() # No unique=True
class Model(models.Model):
foreign_key = models.ForeignKey('Target', to_field='bad')
field = Model._meta.get_field('foreign_key')
errors = field.check()
expected = [
Error(
"'Target.bad' must set unique=True because it is referenced by a foreign key.",
hint=None,
obj=field,
id='fields.E311',
),
]
self.assertEqual(errors, expected)
def test_foreign_key_to_non_unique_field_under_explicit_model(self):
class Target(models.Model):
bad = models.IntegerField()
class Model(models.Model):
field = models.ForeignKey(Target, to_field='bad')
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
"'Target.bad' must set unique=True because it is referenced by a foreign key.",
hint=None,
obj=field,
id='fields.E311',
),
]
self.assertEqual(errors, expected)
def test_foreign_object_to_non_unique_fields(self):
class Person(models.Model):
            # Note that neither field is unique.
country_id = models.IntegerField()
city_id = models.IntegerField()
class MMembership(models.Model):
person_country_id = models.IntegerField()
person_city_id = models.IntegerField()
person = models.ForeignObject(Person,
from_fields=['person_country_id', 'person_city_id'],
to_fields=['country_id', 'city_id'])
field = MMembership._meta.get_field('person')
errors = field.check()
expected = [
Error(
("None of the fields 'country_id', 'city_id' on model 'Person' "
"have a unique=True constraint."),
hint=None,
obj=field,
id='fields.E310',
)
]
self.assertEqual(errors, expected)
def test_on_delete_set_null_on_non_nullable_field(self):
class Person(models.Model):
pass
class Model(models.Model):
foreign_key = models.ForeignKey('Person',
on_delete=models.SET_NULL)
field = Model._meta.get_field('foreign_key')
errors = field.check()
expected = [
Error(
'Field specifies on_delete=SET_NULL, but cannot be null.',
hint='Set null=True argument on the field, or change the on_delete rule.',
obj=field,
id='fields.E320',
),
]
self.assertEqual(errors, expected)
def test_on_delete_set_default_without_default_value(self):
class Person(models.Model):
pass
class Model(models.Model):
foreign_key = models.ForeignKey('Person',
on_delete=models.SET_DEFAULT)
field = Model._meta.get_field('foreign_key')
errors = field.check()
expected = [
Error(
'Field specifies on_delete=SET_DEFAULT, but has no default value.',
hint='Set a default value, or change the on_delete rule.',
obj=field,
id='fields.E321',
),
]
self.assertEqual(errors, expected)
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_nullable_primary_key(self):
class Model(models.Model):
field = models.IntegerField(primary_key=True, null=True)
field = Model._meta.get_field('field')
errors = field.check()
expected = [
Error(
'Primary keys must not have null=True.',
hint='Set null=False on the field, or remove primary_key=True argument.',
obj=field,
id='fields.E007',
),
]
self.assertEqual(errors, expected)
def test_not_swapped_model(self):
class SwappableModel(models.Model):
# A model that can be, but isn't swapped out. References to this
# model should *not* raise any validation error.
class Meta:
swappable = 'TEST_SWAPPABLE_MODEL'
class Model(models.Model):
explicit_fk = models.ForeignKey(SwappableModel,
related_name='explicit_fk')
implicit_fk = models.ForeignKey('invalid_models_tests.SwappableModel',
related_name='implicit_fk')
explicit_m2m = models.ManyToManyField(SwappableModel,
related_name='explicit_m2m')
implicit_m2m = models.ManyToManyField(
'invalid_models_tests.SwappableModel',
related_name='implicit_m2m')
explicit_fk = Model._meta.get_field('explicit_fk')
self.assertEqual(explicit_fk.check(), [])
implicit_fk = Model._meta.get_field('implicit_fk')
self.assertEqual(implicit_fk.check(), [])
explicit_m2m = Model._meta.get_field('explicit_m2m')
self.assertEqual(explicit_m2m.check(from_model=Model), [])
implicit_m2m = Model._meta.get_field('implicit_m2m')
self.assertEqual(implicit_m2m.check(from_model=Model), [])
@override_settings(TEST_SWAPPED_MODEL='invalid_models_tests.Replacement')
def test_referencing_to_swapped_model(self):
class Replacement(models.Model):
pass
class SwappedModel(models.Model):
class Meta:
swappable = 'TEST_SWAPPED_MODEL'
class Model(models.Model):
explicit_fk = models.ForeignKey(SwappedModel,
related_name='explicit_fk')
implicit_fk = models.ForeignKey('invalid_models_tests.SwappedModel',
related_name='implicit_fk')
explicit_m2m = models.ManyToManyField(SwappedModel,
related_name='explicit_m2m')
implicit_m2m = models.ManyToManyField(
'invalid_models_tests.SwappedModel',
related_name='implicit_m2m')
fields = [
Model._meta.get_field('explicit_fk'),
Model._meta.get_field('implicit_fk'),
Model._meta.get_field('explicit_m2m'),
Model._meta.get_field('implicit_m2m'),
]
expected_error = Error(
("Field defines a relation with the model "
"'invalid_models_tests.SwappedModel', which has been swapped out."),
hint="Update the relation to point at 'settings.TEST_SWAPPED_MODEL'.",
id='fields.E301',
)
for field in fields:
expected_error.obj = field
errors = field.check(from_model=Model)
self.assertEqual(errors, [expected_error])
class AccessorClashTests(IsolatedModelsTestCase):
def test_fk_to_integer(self):
self._test_accessor_clash(
target=models.IntegerField(),
relative=models.ForeignKey('Target'))
def test_fk_to_fk(self):
self._test_accessor_clash(
target=models.ForeignKey('Another'),
relative=models.ForeignKey('Target'))
def test_fk_to_m2m(self):
self._test_accessor_clash(
target=models.ManyToManyField('Another'),
relative=models.ForeignKey('Target'))
def test_m2m_to_integer(self):
self._test_accessor_clash(
target=models.IntegerField(),
relative=models.ManyToManyField('Target'))
def test_m2m_to_fk(self):
self._test_accessor_clash(
target=models.ForeignKey('Another'),
relative=models.ManyToManyField('Target'))
def test_m2m_to_m2m(self):
self._test_accessor_clash(
target=models.ManyToManyField('Another'),
relative=models.ManyToManyField('Target'))
def _test_accessor_clash(self, target, relative):
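        # Build three throwaway models: Target already defines a field named
        # 'model_set', which is exactly the default reverse accessor that
        # Model.rel would install, so the checks must report fields.E302.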
class Another(models.Model):
pass
class Target(models.Model):
model_set = target
class Model(models.Model):
rel = relative
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.rel' clashes with field name 'Target.model_set'.",
hint=("Rename field 'Target.model_set', or add/change "
"a related_name argument to the definition "
"for field 'Model.rel'."),
obj=Model._meta.get_field('rel'),
id='fields.E302',
),
]
self.assertEqual(errors, expected)
def test_clash_between_accessors(self):
class Target(models.Model):
pass
class Model(models.Model):
foreign = models.ForeignKey(Target)
m2m = models.ManyToManyField(Target)
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.foreign' clashes with reverse accessor for 'Model.m2m'.",
hint=("Add or change a related_name argument to the definition "
"for 'Model.foreign' or 'Model.m2m'."),
obj=Model._meta.get_field('foreign'),
id='fields.E304',
),
Error(
"Reverse accessor for 'Model.m2m' clashes with reverse accessor for 'Model.foreign'.",
hint=("Add or change a related_name argument to the definition "
"for 'Model.m2m' or 'Model.foreign'."),
obj=Model._meta.get_field('m2m'),
id='fields.E304',
),
]
self.assertEqual(errors, expected)
def test_m2m_to_m2m_with_inheritance(self):
""" Ref #22047. """
class Target(models.Model):
pass
class Model(models.Model):
children = models.ManyToManyField('Child',
related_name="m2m_clash", related_query_name="no_clash")
class Parent(models.Model):
m2m_clash = models.ManyToManyField('Target')
class Child(Parent):
pass
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.children' clashes with field name 'Child.m2m_clash'.",
hint=("Rename field 'Child.m2m_clash', or add/change "
"a related_name argument to the definition "
"for field 'Model.children'."),
obj=Model._meta.get_field('children'),
id='fields.E302',
)
]
self.assertEqual(errors, expected)
class ReverseQueryNameClashTests(IsolatedModelsTestCase):
def test_fk_to_integer(self):
self._test_reverse_query_name_clash(
target=models.IntegerField(),
relative=models.ForeignKey('Target'))
def test_fk_to_fk(self):
self._test_reverse_query_name_clash(
target=models.ForeignKey('Another'),
relative=models.ForeignKey('Target'))
def test_fk_to_m2m(self):
self._test_reverse_query_name_clash(
target=models.ManyToManyField('Another'),
relative=models.ForeignKey('Target'))
def test_m2m_to_integer(self):
self._test_reverse_query_name_clash(
target=models.IntegerField(),
relative=models.ManyToManyField('Target'))
def test_m2m_to_fk(self):
self._test_reverse_query_name_clash(
target=models.ForeignKey('Another'),
relative=models.ManyToManyField('Target'))
def test_m2m_to_m2m(self):
self._test_reverse_query_name_clash(
target=models.ManyToManyField('Another'),
relative=models.ManyToManyField('Target'))
def _test_reverse_query_name_clash(self, target, relative):
class Another(models.Model):
pass
class Target(models.Model):
model = target
class Model(models.Model):
rel = relative
errors = Model.check()
expected = [
Error(
"Reverse query name for 'Model.rel' clashes with field name 'Target.model'.",
hint=("Rename field 'Target.model', or add/change "
"a related_name argument to the definition "
"for field 'Model.rel'."),
obj=Model._meta.get_field('rel'),
id='fields.E303',
),
]
self.assertEqual(errors, expected)
class ExplicitRelatedNameClashTests(IsolatedModelsTestCase):
def test_fk_to_integer(self):
self._test_explicit_related_name_clash(
target=models.IntegerField(),
relative=models.ForeignKey('Target', related_name='clash'))
def test_fk_to_fk(self):
self._test_explicit_related_name_clash(
target=models.ForeignKey('Another'),
relative=models.ForeignKey('Target', related_name='clash'))
def test_fk_to_m2m(self):
self._test_explicit_related_name_clash(
target=models.ManyToManyField('Another'),
relative=models.ForeignKey('Target', related_name='clash'))
def test_m2m_to_integer(self):
self._test_explicit_related_name_clash(
target=models.IntegerField(),
relative=models.ManyToManyField('Target', related_name='clash'))
def test_m2m_to_fk(self):
self._test_explicit_related_name_clash(
target=models.ForeignKey('Another'),
relative=models.ManyToManyField('Target', related_name='clash'))
def test_m2m_to_m2m(self):
self._test_explicit_related_name_clash(
target=models.ManyToManyField('Another'),
relative=models.ManyToManyField('Target', related_name='clash'))
def _test_explicit_related_name_clash(self, target, relative):
class Another(models.Model):
pass
class Target(models.Model):
clash = target
class Model(models.Model):
rel = relative
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.rel' clashes with field name 'Target.clash'.",
hint=("Rename field 'Target.clash', or add/change "
"a related_name argument to the definition "
"for field 'Model.rel'."),
obj=Model._meta.get_field('rel'),
id='fields.E302',
),
Error(
"Reverse query name for 'Model.rel' clashes with field name 'Target.clash'.",
hint=("Rename field 'Target.clash', or add/change "
"a related_name argument to the definition "
"for field 'Model.rel'."),
obj=Model._meta.get_field('rel'),
id='fields.E303',
),
]
self.assertEqual(errors, expected)
class ExplicitRelatedQueryNameClashTests(IsolatedModelsTestCase):
def test_fk_to_integer(self):
self._test_explicit_related_query_name_clash(
target=models.IntegerField(),
relative=models.ForeignKey('Target',
related_query_name='clash'))
def test_fk_to_fk(self):
self._test_explicit_related_query_name_clash(
target=models.ForeignKey('Another'),
relative=models.ForeignKey('Target',
related_query_name='clash'))
def test_fk_to_m2m(self):
self._test_explicit_related_query_name_clash(
target=models.ManyToManyField('Another'),
relative=models.ForeignKey('Target',
related_query_name='clash'))
def test_m2m_to_integer(self):
self._test_explicit_related_query_name_clash(
target=models.IntegerField(),
relative=models.ManyToManyField('Target',
related_query_name='clash'))
def test_m2m_to_fk(self):
self._test_explicit_related_query_name_clash(
target=models.ForeignKey('Another'),
relative=models.ManyToManyField('Target',
related_query_name='clash'))
def test_m2m_to_m2m(self):
self._test_explicit_related_query_name_clash(
target=models.ManyToManyField('Another'),
relative=models.ManyToManyField('Target',
related_query_name='clash'))
def _test_explicit_related_query_name_clash(self, target, relative):
class Another(models.Model):
pass
class Target(models.Model):
clash = target
class Model(models.Model):
rel = relative
errors = Model.check()
expected = [
Error(
"Reverse query name for 'Model.rel' clashes with field name 'Target.clash'.",
hint=("Rename field 'Target.clash', or add/change a related_name "
"argument to the definition for field 'Model.rel'."),
obj=Model._meta.get_field('rel'),
id='fields.E303',
),
]
self.assertEqual(errors, expected)
class SelfReferentialM2MClashTests(IsolatedModelsTestCase):
def test_clash_between_accessors(self):
class Model(models.Model):
first_m2m = models.ManyToManyField('self', symmetrical=False)
second_m2m = models.ManyToManyField('self', symmetrical=False)
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.first_m2m' clashes with reverse accessor for 'Model.second_m2m'.",
hint=("Add or change a related_name argument to the definition "
"for 'Model.first_m2m' or 'Model.second_m2m'."),
obj=Model._meta.get_field('first_m2m'),
id='fields.E304',
),
Error(
"Reverse accessor for 'Model.second_m2m' clashes with reverse accessor for 'Model.first_m2m'.",
hint=("Add or change a related_name argument to the definition "
"for 'Model.second_m2m' or 'Model.first_m2m'."),
obj=Model._meta.get_field('second_m2m'),
id='fields.E304',
),
]
self.assertEqual(errors, expected)
def test_accessor_clash(self):
class Model(models.Model):
model_set = models.ManyToManyField("self", symmetrical=False)
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.model_set' clashes with field name 'Model.model_set'.",
hint=("Rename field 'Model.model_set', or add/change "
"a related_name argument to the definition "
"for field 'Model.model_set'."),
obj=Model._meta.get_field('model_set'),
id='fields.E302',
),
]
self.assertEqual(errors, expected)
def test_reverse_query_name_clash(self):
class Model(models.Model):
model = models.ManyToManyField("self", symmetrical=False)
errors = Model.check()
expected = [
Error(
"Reverse query name for 'Model.model' clashes with field name 'Model.model'.",
hint=("Rename field 'Model.model', or add/change a related_name "
"argument to the definition for field 'Model.model'."),
obj=Model._meta.get_field('model'),
id='fields.E303',
),
]
self.assertEqual(errors, expected)
def test_clash_under_explicit_related_name(self):
class Model(models.Model):
clash = models.IntegerField()
m2m = models.ManyToManyField("self",
symmetrical=False, related_name='clash')
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.m2m' clashes with field name 'Model.clash'.",
hint=("Rename field 'Model.clash', or add/change a related_name "
"argument to the definition for field 'Model.m2m'."),
obj=Model._meta.get_field('m2m'),
id='fields.E302',
),
Error(
"Reverse query name for 'Model.m2m' clashes with field name 'Model.clash'.",
hint=("Rename field 'Model.clash', or add/change a related_name "
"argument to the definition for field 'Model.m2m'."),
obj=Model._meta.get_field('m2m'),
id='fields.E303',
),
]
self.assertEqual(errors, expected)
def test_valid_model(self):
class Model(models.Model):
first = models.ManyToManyField("self",
symmetrical=False, related_name='first_accessor')
second = models.ManyToManyField("self",
symmetrical=False, related_name='second_accessor')
errors = Model.check()
self.assertEqual(errors, [])
class SelfReferentialFKClashTests(IsolatedModelsTestCase):
def test_accessor_clash(self):
class Model(models.Model):
model_set = models.ForeignKey("Model")
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.model_set' clashes with field name 'Model.model_set'.",
hint=("Rename field 'Model.model_set', or add/change "
"a related_name argument to the definition "
"for field 'Model.model_set'."),
obj=Model._meta.get_field('model_set'),
id='fields.E302',
),
]
self.assertEqual(errors, expected)
def test_reverse_query_name_clash(self):
class Model(models.Model):
model = models.ForeignKey("Model")
errors = Model.check()
expected = [
Error(
"Reverse query name for 'Model.model' clashes with field name 'Model.model'.",
hint=("Rename field 'Model.model', or add/change "
"a related_name argument to the definition "
"for field 'Model.model'."),
obj=Model._meta.get_field('model'),
id='fields.E303',
),
]
self.assertEqual(errors, expected)
def test_clash_under_explicit_related_name(self):
class Model(models.Model):
clash = models.CharField(max_length=10)
foreign = models.ForeignKey("Model", related_name='clash')
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.foreign' clashes with field name 'Model.clash'.",
hint=("Rename field 'Model.clash', or add/change "
"a related_name argument to the definition "
"for field 'Model.foreign'."),
obj=Model._meta.get_field('foreign'),
id='fields.E302',
),
Error(
"Reverse query name for 'Model.foreign' clashes with field name 'Model.clash'.",
hint=("Rename field 'Model.clash', or add/change "
"a related_name argument to the definition "
"for field 'Model.foreign'."),
obj=Model._meta.get_field('foreign'),
id='fields.E303',
),
]
self.assertEqual(errors, expected)
class ComplexClashTests(IsolatedModelsTestCase):
# New tests should not be included here, because this is a single,
# self-contained sanity check, not a test of everything.
def test_complex_clash(self):
class Target(models.Model):
tgt_safe = models.CharField(max_length=10)
clash = models.CharField(max_length=10)
model = models.CharField(max_length=10)
clash1_set = models.CharField(max_length=10)
class Model(models.Model):
src_safe = models.CharField(max_length=10)
foreign_1 = models.ForeignKey(Target, related_name='id')
foreign_2 = models.ForeignKey(Target, related_name='src_safe')
m2m_1 = models.ManyToManyField(Target, related_name='id')
m2m_2 = models.ManyToManyField(Target, related_name='src_safe')
errors = Model.check()
expected = [
Error(
"Reverse accessor for 'Model.foreign_1' clashes with field name 'Target.id'.",
hint=("Rename field 'Target.id', or add/change a related_name "
"argument to the definition for field 'Model.foreign_1'."),
obj=Model._meta.get_field('foreign_1'),
id='fields.E302',
),
Error(
"Reverse query name for 'Model.foreign_1' clashes with field name 'Target.id'.",
hint=("Rename field 'Target.id', or add/change a related_name "
"argument to the definition for field 'Model.foreign_1'."),
obj=Model._meta.get_field('foreign_1'),
id='fields.E303',
),
Error(
"Reverse accessor for 'Model.foreign_1' clashes with reverse accessor for 'Model.m2m_1'.",
hint=("Add or change a related_name argument to "
"the definition for 'Model.foreign_1' or 'Model.m2m_1'."),
obj=Model._meta.get_field('foreign_1'),
id='fields.E304',
),
Error(
"Reverse query name for 'Model.foreign_1' clashes with reverse query name for 'Model.m2m_1'.",
hint=("Add or change a related_name argument to "
"the definition for 'Model.foreign_1' or 'Model.m2m_1'."),
obj=Model._meta.get_field('foreign_1'),
id='fields.E305',
),
Error(
"Reverse accessor for 'Model.foreign_2' clashes with reverse accessor for 'Model.m2m_2'.",
hint=("Add or change a related_name argument "
"to the definition for 'Model.foreign_2' or 'Model.m2m_2'."),
obj=Model._meta.get_field('foreign_2'),
id='fields.E304',
),
Error(
"Reverse query name for 'Model.foreign_2' clashes with reverse query name for 'Model.m2m_2'.",
hint=("Add or change a related_name argument to "
"the definition for 'Model.foreign_2' or 'Model.m2m_2'."),
obj=Model._meta.get_field('foreign_2'),
id='fields.E305',
),
Error(
"Reverse accessor for 'Model.m2m_1' clashes with field name 'Target.id'.",
hint=("Rename field 'Target.id', or add/change a related_name "
"argument to the definition for field 'Model.m2m_1'."),
obj=Model._meta.get_field('m2m_1'),
id='fields.E302',
),
Error(
"Reverse query name for 'Model.m2m_1' clashes with field name 'Target.id'.",
hint=("Rename field 'Target.id', or add/change a related_name "
"argument to the definition for field 'Model.m2m_1'."),
obj=Model._meta.get_field('m2m_1'),
id='fields.E303',
),
Error(
"Reverse accessor for 'Model.m2m_1' clashes with reverse accessor for 'Model.foreign_1'.",
hint=("Add or change a related_name argument to the definition "
"for 'Model.m2m_1' or 'Model.foreign_1'."),
obj=Model._meta.get_field('m2m_1'),
id='fields.E304',
),
Error(
"Reverse query name for 'Model.m2m_1' clashes with reverse query name for 'Model.foreign_1'.",
hint=("Add or change a related_name argument to "
"the definition for 'Model.m2m_1' or 'Model.foreign_1'."),
obj=Model._meta.get_field('m2m_1'),
id='fields.E305',
),
Error(
"Reverse accessor for 'Model.m2m_2' clashes with reverse accessor for 'Model.foreign_2'.",
hint=("Add or change a related_name argument to the definition "
"for 'Model.m2m_2' or 'Model.foreign_2'."),
obj=Model._meta.get_field('m2m_2'),
id='fields.E304',
),
Error(
"Reverse query name for 'Model.m2m_2' clashes with reverse query name for 'Model.foreign_2'.",
hint=("Add or change a related_name argument to the definition "
"for 'Model.m2m_2' or 'Model.foreign_2'."),
obj=Model._meta.get_field('m2m_2'),
id='fields.E305',
),
]
self.assertEqual(errors, expected)
class M2mThroughFieldsTests(IsolatedModelsTestCase):
def test_m2m_field_argument_validation(self):
"""
Tests that ManyToManyField accepts the ``through_fields`` kwarg
only if an intermediary table is specified.
"""
class Fan(models.Model):
pass
self.assertRaisesMessage(
ValueError, 'Cannot specify through_fields without a through model',
models.ManyToManyField, Fan, through_fields=('f1', 'f2'))
def test_invalid_order(self):
"""
Tests that mixing up the order of link fields to ManyToManyField.through_fields
triggers validation errors.
"""
class Fan(models.Model):
pass
class Event(models.Model):
invitees = models.ManyToManyField(Fan, through='Invitation', through_fields=('invitee', 'event'))
class Invitation(models.Model):
event = models.ForeignKey(Event)
invitee = models.ForeignKey(Fan)
inviter = models.ForeignKey(Fan, related_name='+')
field = Event._meta.get_field('invitees')
errors = field.check(from_model=Event)
expected = [
Error(
("'Invitation.invitee' is not a foreign key to 'Event'."),
hint="Did you mean one of the following foreign keys to 'Event': event?",
obj=field,
id='fields.E339'),
Error(
("'Invitation.event' is not a foreign key to 'Fan'."),
hint="Did you mean one of the following foreign keys to 'Fan': invitee, inviter?",
obj=field,
id='fields.E339'),
]
self.assertEqual(expected, errors)
def test_invalid_field(self):
"""
Tests that providing invalid field names to ManyToManyField.through_fields
triggers validation errors.
"""
class Fan(models.Model):
pass
class Event(models.Model):
invitees = models.ManyToManyField(Fan, through='Invitation', through_fields=('invalid_field_1', 'invalid_field_2'))
class Invitation(models.Model):
event = models.ForeignKey(Event)
invitee = models.ForeignKey(Fan)
inviter = models.ForeignKey(Fan, related_name='+')
field = Event._meta.get_field('invitees')
errors = field.check(from_model=Event)
expected = [
Error(
("The intermediary model 'invalid_models_tests.Invitation' has no field 'invalid_field_1'."),
hint="Did you mean one of the following foreign keys to 'Event': event?",
obj=field,
id='fields.E338'),
Error(
("The intermediary model 'invalid_models_tests.Invitation' has no field 'invalid_field_2'."),
hint="Did you mean one of the following foreign keys to 'Fan': invitee, inviter?",
obj=field,
id='fields.E338'),
]
self.assertEqual(expected, errors)
def test_explicit_field_names(self):
"""
Tests that if ``through_fields`` kwarg is given, it must specify both
link fields of the intermediary table.
"""
class Fan(models.Model):
pass
class Event(models.Model):
invitees = models.ManyToManyField(Fan, through='Invitation', through_fields=(None, 'invitee'))
class Invitation(models.Model):
event = models.ForeignKey(Event)
invitee = models.ForeignKey(Fan)
inviter = models.ForeignKey(Fan, related_name='+')
field = Event._meta.get_field('invitees')
errors = field.check(from_model=Event)
expected = [
Error(
("Field specifies 'through_fields' but does not provide the names "
"of the two link fields that should be used for the relation "
"through model 'invalid_models_tests.Invitation'."),
hint=("Make sure you specify 'through_fields' as "
"through_fields=('field1', 'field2')"),
obj=field,
id='fields.E337')]
self.assertEqual(expected, errors)
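# For contrast with the failing cases above: a through_fields declaration that
# passes these checks names the link field back to the declaring model first
# and the link field to the target model second.  Sketch only, mirroring the
# test models above:
#
#     class Event(models.Model):
#         invitees = models.ManyToManyField(
#             Fan, through='Invitation', through_fields=('event', 'invitee'))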
|
bsd-3-clause
|
lucidbard/NewsBlur
|
api/newsblur.py
|
11
|
10077
|
# Original API work by Dananjaya Ramanayake <[email protected]>
# Retooled by Samuel Clay, August 2011
# Modified by Luke Hagan, 2011-11-05
import urllib, urllib2
import cookielib
import json
__author__ = "Dananjaya Ramanayake <[email protected]>, Samuel Clay <[email protected]>"
__version__ = "1.0"
API_URL = "http://www.newsblur.com/"
# API_URL = "http://nb.local.host:8000/"
class request():
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookielib.CookieJar()))
def __init__(self, endpoint=None, method='get'):
self.endpoint = endpoint
self.method = method
def __call__(self, func):
def wrapped(*args, **kwargs):
params = func(*args, **kwargs) or {}
url = self.endpoint if self.endpoint else params.pop('url')
params = urllib.urlencode(params)
url = "%s%s" % (API_URL, url)
response = self.opener.open(url, params).read()
return json.loads(response)
return wrapped
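# How the decorator above is used: each API method below simply returns its
# request parameters (a dict, or a list of tuples for repeated keys).  The
# wrapper urlencodes them, resolves the URL -- either the endpoint handed to
# @request(...) or, when none was given, a 'url' key popped from the returned
# dict -- and issues the call through the shared cookie-aware opener.  Because
# a data payload is always passed to opener.open(), every call goes out as an
# HTTP POST; the stored `method` attribute is not otherwise consulted.  The
# JSON response body is decoded and returned to the caller.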
class API:
@request('api/login', method='post')
def login(self, username, password):
'''
Login as an existing user.
If a user has no password set, you cannot just send any old password.
Required parameters, username and password, must be of string type.
'''
return {
'username': username,
'password': password
}
@request('api/logout')
def logout(self):
'''
Logout the currently logged in user.
'''
return
@request('api/signup')
def signup(self, username, password, email):
'''
Create a new user.
All three required parameters must be of type string.
'''
return {
'signup_username': username,
'signup_password': password,
'signup_email': email
}
@request('rss_feeds/search_feed')
def search_feed(self, address, offset=0):
'''
Retrieve information about a feed from its website or RSS address.
Parameter address must be of type string while parameter offset must be an integer.
Will return a feed.
'''
return {
'address': address,
'offset': offset
}
@request('reader/feeds')
def feeds(self, include_favicons=True, flat=False):
'''
Retrieve a list of feeds to which a user is actively subscribed.
Includes the 3 unread counts (positive, neutral, negative), as well as optional favicons.
'''
return {
'include_favicons': include_favicons,
'flat': flat
}
@request('reader/favicons')
def favicons(self, feeds=None):
'''
Retrieve a list of favicons for a list of feeds.
Used when combined with /reader/feeds and include_favicons=false, so the feeds request contains far less data.
Useful for mobile devices, but requires a second request.
'''
data = []
for feed in feeds:
data.append( ("feeds", feed) )
return data
@request()
def page(self, feed_id):
'''
Retrieve the original page from a single feed.
'''
return {
'url': 'reader/page/%s' % feed_id
}
@request()
def feed(self, feed_id, page=1):
'''
Retrieve the stories from a single feed.
'''
return {
'url': 'reader/feed/%s' % feed_id,
'page': page,
}
@request('reader/refresh_feeds')
def refresh_feeds(self):
'''
Up-to-the-second unread counts for each active feed.
Poll for these counts no more than once a minute.
'''
return
@request('reader/feeds_trainer')
def feeds_trainer(self, feed_id=None):
'''
Retrieves all popular and known intelligence classifiers.
Also includes user's own classifiers.
'''
return {
'feed_id': feed_id,
}
@request()
def statistics(self, feed_id=None):
'''
If you only want a user's classifiers, use /classifiers/:id.
Omit the feed_id to get all classifiers for all subscriptions.
'''
return {
'url': 'rss_feeds/statistics/%d' % feed_id
}
@request('rss_feeds/feed_autocomplete')
def feed_autocomplete(self, term):
'''
Get a list of feeds that contain a search phrase.
Searches by feed address, feed url, and feed title, in that order.
Will only show sites with 2+ subscribers.
'''
return {
'term': term
}
@request('reader/starred_stories')
def starred_stories(self, page=1):
'''
Retrieve a user's starred stories.
'''
return {
'page': page,
}
@request('reader/river_stories')
def river_stories(self, feeds, page=1, read_stories_count=0):
'''
Retrieve stories from a collection of feeds. This is known as the River of News.
Stories are ordered in reverse chronological order.
`read_stories_count` is the number of stories that have been read in this
continuation, so NewsBlur can efficiently skip those stories when retrieving
new stories. Takes an array of feed ids.
'''
data = [ ('page', page), ('read_stories_count', read_stories_count) ]
for feed in feeds:
data.append( ("feeds", feed) )
return data
@request('reader/mark_story_as_read')
def mark_story_as_read(self, feed_id, story_ids):
'''
Mark stories as read.
Multiple story ids can be sent at once.
Each story must be from the same feed.
Takes an array of story ids.
'''
data = [ ('feed_id', feed_id) ]
for story_id in story_ids:
data.append( ("story_id", story_id) )
return data
@request('reader/mark_story_as_starred')
def mark_story_as_starred(self, feed_id, story_id):
'''
Mark a story as starred (saved).
'''
return {
'feed_id': feed_id,
'story_id': story_id,
}
@request('reader/mark_all_as_read')
def mark_all_as_read(self, days=0):
'''
Mark all stories in a feed or list of feeds as read.
'''
return {
'days': days,
}
@request('reader/add_url')
def add_url(self, url, folder=''):
'''
Add a feed by its URL.
Can be either the RSS feed or the website itself.
'''
return {
'url': url,
'folder': folder,
}
@request('reader/add_folder')
def add_folder(self, folder, parent_folder=''):
'''
Add a new folder.
'''
return {
'folder': folder,
'parent_folder': parent_folder,
}
@request('reader/rename_feed')
def rename_feed(self, feed_id, feed_title):
'''
Rename a feed title. Only the current user will see the new title.
'''
return {
'feed_id': feed_id,
'feed_title': feed_title,
}
@request('reader/delete_feed')
def delete_feed(self, feed_id, in_folder):
'''
Unsubscribe from a feed. Removes it from the folder.
Set the in_folder parameter to remove a feed from the correct
folder, in case the user is subscribed to the feed in multiple folders.
'''
return {
'feed_id': feed_id,
'in_folder': in_folder,
}
@request('reader/rename_folder')
def rename_folder(self, folder_to_rename, new_folder_name, in_folder):
'''
Rename a folder.
'''
return {
'folder_to_rename': folder_to_rename,
'new_folder_name': new_folder_name,
'in_folder': in_folder,
}
@request('reader/delete_folder')
def delete_folder(self, folder_to_delete, in_folder):
'''
Delete a folder and unsubscribe from all feeds inside.
'''
return {
'folder_to_delete': folder_to_delete,
'in_folder': in_folder,
}
@request('reader/mark_feed_as_read')
def mark_feed_as_read(self, feed_ids):
'''
Mark a list of feeds as read.
Takes an array of feeds.
'''
data = []
for feed in feed_ids:
data.append( ("feed_id", feed) )
return data
@request('reader/save_feed_order')
def save_feed_order(self, folders):
'''
Reorder feeds and move them around between folders.
The entire folder structure needs to be serialized.
'''
return {
'folders': folders,
}
@request()
def classifier(self, feed_id):
'''
Get the intelligence classifiers for a user's site.
Only includes the user's own classifiers.
Use /reader/feeds_trainer for popular classifiers.
'''
return {
'url': '/classifier/%d' % feed_id,
}
@request('classifier/save')
def classifier_save(self, like_type, dislike_type, remove_like_type, remove_dislike_type):
'''
Save intelligence classifiers (tags, titles, authors, and the feed) for a feed.
TODO: Make this usable.
'''
        raise NotImplementedError
@request('import/opml_export')
def opml_export(self):
'''
Download a backup of feeds and folders as an OPML file.
Contains folders and feeds in XML; useful for importing in another RSS reader.
'''
return
@request('import/opml_upload')
def opml_upload(self, opml_file):
'''
Upload an OPML file.
'''
f = open(opml_file)
return {
'file': f
}
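
if __name__ == '__main__':
    # Minimal usage sketch, not part of the original module: it assumes network
    # access to API_URL and an existing NewsBlur account; the credentials below
    # are hypothetical placeholders.
    nb = API()
    print nb.login("example_user", "example_password")
    feeds = nb.feeds(include_favicons=False, flat=True)
    print sorted(feeds.keys())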
|
mit
|
hydroshare/hydroshare-demo-auth
|
setup.py
|
3
|
1620
|
import os
from setuptools import setup, find_packages
def read_file(filename):
"""Read a file into a string"""
path = os.path.abspath(os.path.dirname(__file__))
filepath = os.path.join(path, filename)
try:
return open(filepath).read()
except IOError:
return ''
setup(
name='django-all-access',
version=__import__('allaccess').__version__,
author='Mark Lavin',
author_email='[email protected]',
packages=find_packages(),
include_package_data=True,
url='https://github.com/mlavin/django-all-access',
license='BSD',
description=' '.join(__import__('allaccess').__doc__.splitlines()).strip(),
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
long_description=read_file('README.rst'),
install_requires=(
'pycrypto>=2.4',
'requests>=1.0',
'requests_oauthlib>=0.3.0',
'oauthlib>=0.3.4',
),
tests_require=('mock>=0.8', ),
test_suite="runtests.runtests",
zip_safe=False,
)
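
# Example (hedged) commands for exercising this setup script locally:
#   pip install -e .         # editable install pulling in the declared requirements
#   python setup.py test     # runs the "runtests.runtests" suite listed above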
|
bsd-2-clause
|
CSD-Public/stonix
|
src/MacBuild/proto/lib/manage_user/macos_user.py
|
1
|
31321
|
"""
Cross platform user creation and management
Created for testing cross user testing for the ramdisk project, specifically
unionfs functionality.
@author: Roy Nielsen
"""
import re
import os
import pty
import sys
import shutil
from subprocess import Popen
##########
# local app libraries
from lib.manage_user.parent_manage_user import ParentManageUser
from lib.manage_user.parent_manage_user import BadUserInfoError
from lib.run_commands import RunWith
from lib.loggers import CyLogger
from lib.loggers import LogPriority as lp
class DsclError(Exception):
    '''Meant for being thrown when an action/class being run/instantiated is not
applicable for the running operating system.
@author: Roy Nielsen
'''
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
class CreateHomeDirError(Exception):
    '''Meant for being thrown when an action/class being run/instantiated is not
applicable for the running operating system.
@author: Roy Nielsen
'''
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
class MacOSUser(ParentManageUser):
'''Class to manage users on Mac OS.
#----- Getters
@method findUniqueUid
@method uidTaken
@method getUser
@method getUserShell
@method getUserComment
@method getUserUid
@method getUserPriGid
@method getUserHomeDir
@method isUserInstalled
@method isUserInGroup
@method authenticate
#----- Setters
@method createStandardUser
@method createBasicUser
@method setUserShell
@method setUserComment
@method setUserUid
@method setUserPriGid
@method setUserHomeDir
@method createHomeDirectory
@method addUserToGroup
@method setUserPassword
@method fixUserHome
#----- User removal
@method rmUser
@method rmUserFromGroup
@method rmUserHome
@author: Roy Nielsen
'''
def __init__(self, **kwargs):
"""
Variables that can be passed in:
logger
userName
userShell
userComment
userUid
userPriGid
userHomeDir
"""
if 'logDispatcher' not in kwargs:
raise ValueError("Variable 'logDispatcher' a required parameter for " + str(self.__class__.__name__))
super(MacOSUser, self).__init__(**kwargs)
self.module_version = '20160225.125554.540679'
self.dscl = "/usr/bin/dscl"
self.runWith = RunWith(self.logger)
#----------------------------------------------------------------------
# Getters
#----------------------------------------------------------------------
def findUniqueUid(self):
'''We need to make sure to find an unused uid (unique ID) for the user,
$ dscl . -list /Users UniqueID
will list all the existing users, an unused number above 500 is good.
@author: Roy Nielsen
'''
success = False
maxUserID = 0
newUserID = 0
userList = self.getDscl(".", "-list", "/Users", "UniqueID")
#####
# Sort the list, add one to the highest value and return that
# value
for user in str(userList).split("\n"):
if int(user.split()[1]) > maxUserID:
maxUserID = int(user.split()[1])
newUserID = str(int(maxUserID + 1))
return newUserID
#----------------------------------------------------------------------
def uidTaken(self, uid):
'''See if the UID requested has been taken. Only approve uid's over 1k
$ dscl . -list /Users UniqueID
@author: Roy Nielsen
:param uid:
'''
uidList = []
success = False
userList = self.getDscl(".", "-list", "/Users", "UniqueID")
#####
# Sort the list, add one to the highest value and return that
# value
for user in str(userList).split("\n"):
uidList.append(str(user.split()[1]))
if str(uid) in uidList:
success = True
return success
#----------------------------------------------------------------------
def getUser(self, userName=""):
'''
:param userName: (Default value = "")
'''
userInfo = False
if self.isSaneUserName(userName):
output = self.getDscl(".", "read", "/Users/" + str(userName), "RecordName")
try:
userInfo = output.split()[1]
except (KeyError, IndexError) as err:
self.logger.log(lp.INFO, "Error attempting to find user" + \
str(userName) + " in the " + \
"directory service.")
else:
raise BadUserInfoError("Need a valid user name...")
return userInfo
#----------------------------------------------------------------------
def getUserShell(self, userName=""):
'''
:param userName: (Default value = "")
'''
userShell = False
if self.isSaneUserName(userName):
output = self.getDscl(".", "read", "/Users/" + str(userName), "UserShell")
try:
userShell = output.split()[1]
except (KeyError, IndexError) as err:
self.logger.log(lp.INFO, "Error attempting to find user" + \
str(userName) + " in the " + \
"directory service.")
else:
raise BadUserInfoError("Need a valid user name...")
return userShell
#----------------------------------------------------------------------
def getUserComment(self, userName=""):
'''
:param userName: (Default value = "")
'''
userComment = False
if self.isSaneUserName(userName):
#####
# Need to process the output to get the right information due to a
            # spurious "\n" in the output
output = self.getDscl(".", "read", "/Users/" + str(userName), "RealName")
try:
userComment = output[1]
except (KeyError, IndexError) as err:
self.logger.log(lp.INFO, "Error attempting to find user" + \
str(userName) + " in the " + \
"directory service.")
else:
raise BadUserInfoError("Need a valid user name...")
return userComment
#----------------------------------------------------------------------
def getUserUid(self, userName=""):
'''
:param userName: (Default value = "")
'''
userUid = False
if self.isSaneUserName(userName):
output = self.getDscl(".", "read", "/Users/" + str(userName), "UniqueID")
#####
# Process to get out the right information....
try:
userUid = output.split()[1]
except (KeyError, IndexError) as err:
self.logger.log(lp.INFO, "Error attempting to find user" + \
str(userName) + " in the " + \
"directory service.")
else:
raise BadUserInfoError("Need a valid user name...")
return userUid
#----------------------------------------------------------------------
def getUserPriGid(self, userName=""):
'''
:param userName: (Default value = "")
'''
userPriGid = False
if self.isSaneUserName(userName):
output = self.getDscl(".", "read", "/Users/" + str(userName), "PrimaryGroupID")
#####
# Process to get out the right information....
try:
userPriGid = output.split()[1]
except (KeyError, IndexError) as err:
self.logger.log(lp.INFO, "Error attempting to find user" + \
str(userName) + " in the " + \
"directory service.")
else:
raise BadUserInfoError("Need a valid user name...")
return userPriGid
#----------------------------------------------------------------------
def getUserHomeDir(self, userName=""):
'''
:param userName: (Default value = "")
'''
userHomeDir = False
if self.isSaneUserName(userName):
output = self.getDscl(".", "read", "/Users/" + str(userName), "NFSHomeDirectory")
#####
# Process to get out the right information....
try:
userHomeDir = output.split()[1]
except (KeyError, IndexError) as err:
self.logger.log(lp.INFO, "Error attempting to find user" + \
str(userName) + " in the " + \
"directory service.")
else:
raise BadUserInfoError("Need a valid user name...")
return userHomeDir
#----------------------------------------------------------------------
def isUserInstalled(self, user=""):
'''Check if the user "user" is installed
@author Roy Nielsen
:param user: (Default value = "")
'''
success = False
if self.isSaneUserName(user):
cmd = [self.dscl, ".", "-read", "/Users/" + str(user)]
self.runWith.setCommand(cmd)
self.runWith.communicate()
retval, reterr, retcode = self.runWith.getNlogReturns()
if not reterr:
success = True
return success
#----------------------------------------------------------------------
def isUserInGroup(self, userName="", groupName=""):
'''Check if this user is in this group
@author: Roy Nielsen
:param userName: (Default value = "")
:param groupName: (Default value = "")
'''
self.logger.log(lp.DEBUG, "U: " + str(userName))
self.logger.log(lp.DEBUG, "G: " + str(groupName))
success = False
if self.isSaneUserName(userName) and self.isSaneGroupName(groupName):
output = self.getDscl(".", "-read", "/Groups/" + groupName, "users")
self.logger.log(lp.CRITICAL, "Output: " + str(output))
users = output[1:]
self.logger.log(lp.CRITICAL, "Users: " + str(users))
if userName in users:
success = True
return success
#----------------------------------------------------------------------
def validateUser(self, userName=False, userShell=False, userComment=False,
userUid=False, userPriGid=False, userHomeDir=False):
'''Future functionality... validate that the passed in parameters to the
        class instantiation match.
@author:
:param userName: (Default value = False)
:param userShell: (Default value = False)
:param userComment: (Default value = False)
:param userUid: (Default value = False)
:param userPriGid: (Default value = False)
:param userHomeDir: (Default value = False)
'''
sane = False
#####
# Look up all user attributes and check that they are accurate.
# Only check the "SANE" parameters passed in.
if self.isSaneUserName(userName):
self.userName = userName
sane = True
else:
raise BadUserInfoError("Need a valid user name...")
if self.isSaneUserShell(userShell) and sane:
self.userShell = userShell
elif not userShell:
pass
else:
sane = False
if self.isSaneUserComment(userComment) and sane:
self.userComment = userComment
elif not userComment:
pass
else:
sane = False
if self.isSaneUserUid(str(userUid)) and sane:
            self.userUid = userUid
elif not userUid:
pass
else:
sane = False
if self.isSaneUserPriGid(str(userPriGid)) and sane:
            self.userPriGid = userPriGid
elif not userPriGid:
pass
else:
sane = False
if self.isSaneUserHomeDir(userHomeDir) and sane:
self.userHomeDir = userHomeDir
elif not userHomeDir:
pass
else:
sane = False
return sane
def authenticate(self, user="", password=""):
'''Open a pty to run "su" to see if the password is correct...
:param user: (Default value = "")
:param password: (Default value = "")
'''
authenticated = False
if not self.isSaneUserName(user) or \
re.match("^\s+$", password) or not password:
self.logger.log(lp.INFO, "Cannot pass in empty or bad parameters...")
self.logger.log(lp.INFO, "user = \"" + str(user) + "\"")
self.logger.log(lp.INFO, "check password...")
else:
output = ""
internal_command = ["/usr/bin/su", "-", str(user), "-c", "/bin/echo hello world"]
command = " ".join(internal_command)
self.logger.log(lp.INFO, "command: " + str(command))
(master, slave) = pty.openpty()
process = Popen(internal_command, stdin=slave, stdout=slave, stderr=slave, shell=False)
#####
# Read password prompt
prompt = os.read(master, 512)
#####
# send the password
os.write(master, password + "\n")
#####
# catch the password
prompt = os.read(master, 512)
#####
# catch the output
output = os.read(master, 512)
os.close(master)
os.close(slave)
process.wait()
output = output.strip()
#####
# Check if valid or not...
if re.match("^su: Sorry", str(output)):
authenticated = False
elif re.match("^hello world", str(output)):
authenticated = True
else:
authenticated = False
self.logger.log(lp.INFO, "Leaving authenticate method with " + \
"output of: \"" + str(output) + "\"")
return authenticated
#----------------------------------------------------------------------
# Setters
#----------------------------------------------------------------------
def createStandardUser(self, userName, password):
'''Creates a user that has the "next" uid in line to be used, then puts
        it in a group of the same id. Uses /bin/bash as the standard shell.
The userComment is left empty. Primary use is managing a user
during test automation, when requiring a "user" context.
It does not set a login keychain password as that is created on first
login to the GUI.
@author: Roy Nielsen
:param userName:
:param password:
'''
self.createBasicUser(userName)
newUserID = self.findUniqueUid()
newUserGID = newUserID
self.setUserUid(userName, newUserID)
self.setUserPriGid(userName, newUserID)
self.setUserHomeDir(userName)
self.setUserShell(userName, "/bin/bash")
self.setUserPassword(userName, password)
#####
# Don't need to set the user login keychain password as it should be
# created on first login.
#----------------------------------------------------------------------
def createBasicUser(self, userName=""):
'''Create a username with just a moniker. Allow the system to take care of
the rest.
Only allow usernames with letters and numbers.
On the MacOS platform, all other steps must also be done.
@author: Roy Nielsen
:param userName: (Default value = "")
'''
success = False
reterr = ""
if isinstance(userName, str)\
and re.match("^[A-Za-z][A-Za-z0-9]*$", userName):
cmd = [self.dscl, ".", "-create", "/Users/" + str(userName)]
self.runWith.setCommand(cmd)
self.runWith.communicate()
retval, reterr, retcode = self.runWith.getNlogReturns()
if not reterr:
success = True
else:
raise DsclError("Error trying to set a value with dscl (" + \
str(reterr).strip() + ")")
return success
#----------------------------------------------------------------------
def setUserShell(self, user="", shell=""):
'''dscl . -create /Users/luser UserShell /bin/bash
@author: Roy Nielsen
:param user: (Default value = "")
:param shell: (Default value = "")
'''
success = False
if self.isSaneUserName(user) and self.isSaneUserShell(shell):
isSetDSL = self.setDscl(".", "-create", "/Users/" + str(user),
"UserShell", str(shell))
if isSetDSL:
success = True
return success
#----------------------------------------------------------------------
def setUserComment(self, user="", comment=""):
'''dscl . -create /Users/luser RealName "Real A. Name"
@author: Roy Nielsen
:param user: (Default value = "")
:param comment: (Default value = "")
'''
success = False
if self.isSaneUserName(user) and comment:
isSetDSL = self.setDscl(".", "-create", "/Users/" + str(user),
"RealName", str(comment))
if isSetDSL:
success = True
return success
#----------------------------------------------------------------------
def setUserUid(self, user="", uid=""):
'''dscl . -create /Users/luser UniqueID "503"
@author: Roy Nielsen
:param user: (Default value = "")
:param uid: (Default value = "")
'''
success = False
if self.isSaneUserName(user) and uid:
isSetDSL = self.setDscl(".", "-create", "/Users/" + str(user),
"UniqueID", str(uid))
if isSetDSL:
success = True
return success
#----------------------------------------------------------------------
def setUserPriGid(self, user="", priGid=""):
'''dscl . -create /Users/luser PrimaryGroupID 20
@author: Roy Nielsen
:param user: (Default value = "")
:param priGid: (Default value = "")
'''
success = False
if self.isSaneUserName(user) and priGid:
isSetDSL = self.setDscl(".", "-create", "/Users/" + str(user),
"PrimaryGroupID", str(priGid))
if isSetDSL:
success = True
return success
#----------------------------------------------------------------------
def setUserHomeDir(self, user="", userHome=""):
'''Create a "local" home directory
dscl . -create /Users/luser NFSHomeDirectory /Users/luser
better yet:
createhomedir -l -u <username>
@author: Roy Nielsen
:param user: (Default value = "")
:param userHome: (Default value = "")
'''
success = False
#####
# Creating a non-standard userHome is not currently permitted
#if self.saneUserName(user) and self.saneUserHomeDir(userHome):
if self.isSaneUserName(user):
isSetDSCL = self.setDscl(".", "-create", "/Users/" + str(user),
"NFSHomeDirectory", str("/Users/" + str(user)))
if isSetDSCL:
success = True
return success
#----------------------------------------------------------------------
def createHomeDirectory(self, user=""):
'''createhomedir -c -u luser
This should use the system "User Template" for standard system user
settings.
@author: Roy Nielsen
:param user: (Default value = "")
'''
success = False
reterr = ""
if user:
cmd = ["/usr/sbin/createhomedir", "-c", " -u", + str(user)]
self.runWith.setCommand(cmd)
self.runWith.communicate()
retval, reterr, retcode = self.runWith.getNlogReturns()
if not reterr:
success = True
else:
raise CreateHomeDirError("Error trying to create user home (" + \
str(reterr).strip() + ")")
return success
#----------------------------------------------------------------------
def addUserToGroup(self, user="", group=""):
'''dscl . -append /Groups/admin GroupMembership luser
@author: Roy Nielsen
:param user: (Default value = "")
:param group: (Default value = "")
'''
success = False
if self.isSaneUserName(user) and self.isSaneGroupName(group):
isSetDSCL = self.setDscl(".", "-append", "/Groups/" + str(group),
"GroupMembership", str(user))
if isSetDSCL:
success = True
return success
#----------------------------------------------------------------------
def setUserPassword(self, user="", password="", oldPassword=""):
'''dscl . -passwd /Users/luser password
-- or --
dscl . -passwd /Users/luser oldPassword password
@author: Roy Nielsen
:param user: (Default value = "")
:param password: (Default value = "")
:param oldPassword: (Default value = "")
'''
        success = False
        isSetDSCL = False
if self.isSaneUserName(user):
if oldPassword:
isSetDSCL = self.setDscl(".", "-passwd", "/Users/" + str(user),
'%s'%oldPassword, '%s'%password)
else:
isSetDSCL = self.setDscl(".", "-passwd", "/Users/" + str(user),
'%s'%password)
self.logger.log(lp.DEBUG, "isSetDSCL: " + str(isSetDSCL))
else:
self.logger.log(lp.DEBUG, "Tribbles in the bulkhead Jim!")
if not isSetDSCL:
success = False
else:
success = True
return success
#----------------------------------------------------------------------
def fixUserHome(self, userName=""):
'''Get the user information from the local directory and fix the user
ownership and group of the user's home directory to reflect
what is in the local directory service.
@author: Roy Nielsen
:param userName: (Default value = "")
'''
success = False
if self.isSaneUserName(userName):
#####
# Acquire the user data based on the username first.
try:
userUid = self.getUserUid(userName)
userPriGid = self.getUserPriGid(userName)
userHomeDir = self.getUserHomeDir(userName)
except BadUserInfoError as err:
self.logger.log(lp.INFO, "Exception trying to find: \"" + \
str(userName) + "\" user information")
self.logger.log(lp.INFO, "err: " + str(err))
else:
success = True
        if success:
            try:
                uid = int(userUid)
                gid = int(userPriGid)
                for root, dirs, files in os.walk(userHomeDir):
                    for d in dirs:
                        os.chown(os.path.join(root, d), uid, gid)
                    for f in files:
                        # At this point in the walk the files live directly
                        # under root, so join against root rather than root/d.
                        os.chown(os.path.join(root, f), uid, gid)
            except (OSError, ValueError) as err:
                success = False
                self.logger.log(lp.INFO, "Exception attempting to chown...")
                raise err
            else:
                success = True
return success
#----------------------------------------------------------------------
# User Property Removal
#----------------------------------------------------------------------
def rmUser(self, user=""):
'''dscl . delete /Users/<user>
@author: Roy Nielsen
:param user: (Default value = "")
'''
success = False
if self.isSaneUserName(user):
cmd = [self.dscl, ".", "-delete", "/Users/" + str(user)]
self.runWith.setCommand(cmd)
self.runWith.communicate()
retval, reterr, retcode = self.runWith.getNlogReturns()
if not reterr:
success = True
else:
raise Exception("Error trying to remove a user (" + \
str(reterr).strip() + ")")
return success
#----------------------------------------------------------------------
def rmUserHome(self, user=""):
'''Remove the user home... right now only default location, but should
look up the user home in the directory service and remove that
specifically.
@author: Roy Nielsen
:param user: (Default value = "")
'''
success = False
if self.isSaneUserName(user):
#####
#
# ***** WARNING WILL ROBINSON *****
#
# Please refactor to do a lookup of the user in the directory
# service, and use the home directory specified there..
#
try:
shutil.rmtree("/Users/" + str(user))
            except (IOError, OSError) as err:
self.logger.log(lp.INFO, "Exception trying to remove user home...")
self.logger.log(lp.INFO, "Exception: " + str(err))
raise err
else:
success = True
return success
#----------------------------------------------------------------------
def rmUserFromGroup(self, user="", group=""):
'''
:param user: (Default value = "")
:param group: (Default value = "")
'''
success = False
if self.isSaneUserName(user) and self.isSaneGroupName(group):
isSetDSCL = self.setDscl(".", "-delete", "/Groups/" + str(group),
"GroupMembership", str(user))
if isSetDSCL:
success = True
return success
#----------------------------------------------------------------------
# Mac OS Specific Methods
#----------------------------------------------------------------------
def setDscl(self, directory=".", action="", object="", property="", value=""):
'''Using dscl to set a value in a directory...
@author: Roy Nielsen
:param directory: (Default value = ".")
:param action: (Default value = "")
:param object: (Default value = "")
:param property: (Default value = "")
:param value: (Default value = "")
'''
success = False
reterr = ""
retval = ""
#####
# If elevated, use the liftDown runWith method to run the command as
# a regular user.
if directory and action and object and property:
if directory and action and object and property and value:
cmd = [self.dscl, directory, action, object, property, value]
else:
cmd = [self.dscl, directory, action, object, property]
self.runWith.setCommand(cmd)
if re.match("^%0$", str(os.getuid()).strip()):
#####
# Run the command, lift down...
self.logger.log(lp.DEBUG, "dscl-cmd: " + str(cmd))
self.runWith.liftDown(self.userName)
self.logger.log(lp.INFO, "Took the lift down...")
retval, reterr, retcode = self.runWith.getNlogReturns()
if not reterr:
success = True
else:
#####
# Run the command
retval, reterr, retcode = self.runWith.communicate()
if not reterr:
success = True
retval, reterr, retcode = self.runWith.getNlogReturns()
return success
#----------------------------------------------------------------------
def getDscl(self, directory="", action="", dirobj="", dirprop=""):
'''Using dscl to retrieve a value from the directory
@author: Roy Nielsen
:param directory: (Default value = "")
:param action: (Default value = "")
:param dirobj: (Default value = "")
:param dirprop: (Default value = "")
'''
success = False
reterr = ""
retval = ""
#####
# FIRST VALIDATE INPUT!!
if isinstance(directory, str) and re.match("^[/\.][A-Za-z0-9/]*", directory):
success = True
else:
success = False
if isinstance(action, str) and re.match("^[-]*[a-z]+", action) and success:
success = True
else:
success = False
        if isinstance(dirobj, str) and re.match("^[A-Za-z0-9/]+", dirobj) and success:
success = True
else:
success = False
if isinstance(dirprop, str) and re.match("^[A-Za-z0-9]+", dirprop) and success:
success = True
else:
success = False
self.logger.log(lp.CRITICAL, "SUCCESS: " + str(success))
#####
# Now do the directory lookup.
if success:
cmd = [self.dscl, directory, action, dirobj, dirprop]
self.runWith.setCommand(cmd)
self.runWith.communicate()
retval, reterr, retcode = self.runWith.getNlogReturns()
if not reterr:
success = True
else:
raise DsclError("Error trying to get a value with dscl (" + \
str(reterr).strip() + ")")
return retval
#----------------------------------------------------------------------
def isUserAnAdmin(self, userName=""):
'''Check if this user is in this group
@author: Roy Nielsen
:param userName: (Default value = "")
'''
success = False
if self.isSaneUserName(userName):
success = self.isUserInGroup(userName, "admin")
return success
#----------------------------------------------------------------------
def acquireUserData(self):
'''Acquire user data for local user lookup information.
@author: Roy Nielsen
'''
pass
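
if __name__ == '__main__':
    # Minimal usage sketch, not part of the original module.  It assumes macOS,
    # root privileges, an importable stonix "lib" package, and a hypothetical
    # account name and password; CyLogger() construction details may differ.
    logger = CyLogger()
    manager = MacOSUser(logDispatcher=logger, userName="exampleuser")
    if not manager.isUserInstalled("exampleuser"):
        manager.createStandardUser("exampleuser", "Ex@mpleP4ss")
    print(manager.getUserHomeDir("exampleuser"))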
|
gpl-2.0
|
mlachwani/Android-4.4.3-HTC-M8-Kernel-ATT
|
Documentation/target/tcm_mod_builder.py
|
4981
|
41422
|
#!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: [email protected]
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
process_fo = 0;
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
process_fo = 1;
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('get_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
if re.search('set_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd();
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
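# Example invocation (illustrative; "tcm_nab5000" is a hypothetical module
# name). The script builds the tree root from os.getcwd() + "/../../", so it
# assumes it is run from its own directory two levels below the kernel source
# root; the generated skeleton lands in drivers/target/<modulename>/ with
# _base.h, _fabric.c, _fabric.h, _configfs.c, Makefile and Kconfig files:
#
#   python tcm_mod_builder.py -m tcm_nab5000 -p iSCSI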
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
|
gpl-2.0
|
keen99/SickRage
|
tornado/test/twisted_test.py
|
18
|
25239
|
# Author: Ovidiu Predescu
# Date: July 2011
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unittest for the twisted-style reactor.
"""
from __future__ import absolute_import, division, print_function, with_statement
import logging
import os
import shutil
import signal
import sys
import tempfile
import threading
import warnings
try:
import fcntl
from twisted.internet.defer import Deferred, inlineCallbacks, returnValue
from twisted.internet.interfaces import IReadDescriptor, IWriteDescriptor
from twisted.internet.protocol import Protocol
from twisted.python import log
from tornado.platform.twisted import TornadoReactor, TwistedIOLoop
from zope.interface import implementer
have_twisted = True
except ImportError:
have_twisted = False
# The core of Twisted 12.3.0 is available on python 3, but twisted.web is not
# so test for it separately.
try:
from twisted.web.client import Agent, readBody
from twisted.web.resource import Resource
from twisted.web.server import Site
# As of Twisted 15.0.0, twisted.web is present but fails our
# tests due to internal str/bytes errors.
have_twisted_web = sys.version_info < (3,)
except ImportError:
have_twisted_web = False
try:
import thread # py2
except ImportError:
import _thread as thread # py3
from tornado.escape import utf8
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.platform.auto import set_close_exec
from tornado.platform.select import SelectIOLoop
from tornado.testing import bind_unused_port
from tornado.test.util import unittest
from tornado.util import import_object
from tornado.web import RequestHandler, Application
skipIfNoTwisted = unittest.skipUnless(have_twisted,
"twisted module not present")
skipIfNoSingleDispatch = unittest.skipIf(
gen.singledispatch is None, "singledispatch module not present")
def save_signal_handlers():
saved = {}
for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGCHLD]:
saved[sig] = signal.getsignal(sig)
if "twisted" in repr(saved):
if not issubclass(IOLoop.configured_class(), TwistedIOLoop):
# when the global ioloop is twisted, we expect the signal
# handlers to be installed. Otherwise, it means we're not
# cleaning up after twisted properly.
raise Exception("twisted signal handlers already installed")
return saved
def restore_signal_handlers(saved):
for sig, handler in saved.items():
signal.signal(sig, handler)
class ReactorTestCase(unittest.TestCase):
def setUp(self):
self._saved_signals = save_signal_handlers()
self._io_loop = IOLoop()
self._reactor = TornadoReactor(self._io_loop)
def tearDown(self):
self._io_loop.close(all_fds=True)
restore_signal_handlers(self._saved_signals)
@skipIfNoTwisted
class ReactorWhenRunningTest(ReactorTestCase):
def test_whenRunning(self):
self._whenRunningCalled = False
self._anotherWhenRunningCalled = False
self._reactor.callWhenRunning(self.whenRunningCallback)
self._reactor.run()
self.assertTrue(self._whenRunningCalled)
self.assertTrue(self._anotherWhenRunningCalled)
def whenRunningCallback(self):
self._whenRunningCalled = True
self._reactor.callWhenRunning(self.anotherWhenRunningCallback)
self._reactor.stop()
def anotherWhenRunningCallback(self):
self._anotherWhenRunningCalled = True
@skipIfNoTwisted
class ReactorCallLaterTest(ReactorTestCase):
def test_callLater(self):
self._laterCalled = False
self._now = self._reactor.seconds()
self._timeout = 0.001
dc = self._reactor.callLater(self._timeout, self.callLaterCallback)
self.assertEqual(self._reactor.getDelayedCalls(), [dc])
self._reactor.run()
self.assertTrue(self._laterCalled)
self.assertTrue(self._called - self._now > self._timeout)
self.assertEqual(self._reactor.getDelayedCalls(), [])
def callLaterCallback(self):
self._laterCalled = True
self._called = self._reactor.seconds()
self._reactor.stop()
@skipIfNoTwisted
class ReactorTwoCallLaterTest(ReactorTestCase):
def test_callLater(self):
self._later1Called = False
self._later2Called = False
self._now = self._reactor.seconds()
self._timeout1 = 0.0005
dc1 = self._reactor.callLater(self._timeout1, self.callLaterCallback1)
self._timeout2 = 0.001
dc2 = self._reactor.callLater(self._timeout2, self.callLaterCallback2)
self.assertTrue(self._reactor.getDelayedCalls() == [dc1, dc2] or
self._reactor.getDelayedCalls() == [dc2, dc1])
self._reactor.run()
self.assertTrue(self._later1Called)
self.assertTrue(self._later2Called)
self.assertTrue(self._called1 - self._now > self._timeout1)
self.assertTrue(self._called2 - self._now > self._timeout2)
self.assertEqual(self._reactor.getDelayedCalls(), [])
def callLaterCallback1(self):
self._later1Called = True
self._called1 = self._reactor.seconds()
def callLaterCallback2(self):
self._later2Called = True
self._called2 = self._reactor.seconds()
self._reactor.stop()
@skipIfNoTwisted
class ReactorCallFromThreadTest(ReactorTestCase):
def setUp(self):
super(ReactorCallFromThreadTest, self).setUp()
self._mainThread = thread.get_ident()
def tearDown(self):
self._thread.join()
super(ReactorCallFromThreadTest, self).tearDown()
def _newThreadRun(self):
self.assertNotEqual(self._mainThread, thread.get_ident())
if hasattr(self._thread, 'ident'): # new in python 2.6
self.assertEqual(self._thread.ident, thread.get_ident())
self._reactor.callFromThread(self._fnCalledFromThread)
def _fnCalledFromThread(self):
self.assertEqual(self._mainThread, thread.get_ident())
self._reactor.stop()
def _whenRunningCallback(self):
self._thread = threading.Thread(target=self._newThreadRun)
self._thread.start()
def testCallFromThread(self):
self._reactor.callWhenRunning(self._whenRunningCallback)
self._reactor.run()
@skipIfNoTwisted
class ReactorCallInThread(ReactorTestCase):
def setUp(self):
super(ReactorCallInThread, self).setUp()
self._mainThread = thread.get_ident()
def _fnCalledInThread(self, *args, **kwargs):
self.assertNotEqual(thread.get_ident(), self._mainThread)
self._reactor.callFromThread(lambda: self._reactor.stop())
def _whenRunningCallback(self):
self._reactor.callInThread(self._fnCalledInThread)
def testCallInThread(self):
self._reactor.callWhenRunning(self._whenRunningCallback)
self._reactor.run()
class Reader(object):
def __init__(self, fd, callback):
self._fd = fd
self._callback = callback
def logPrefix(self):
return "Reader"
def close(self):
self._fd.close()
def fileno(self):
return self._fd.fileno()
def readConnectionLost(self, reason):
self.close()
def connectionLost(self, reason):
self.close()
def doRead(self):
self._callback(self._fd)
if have_twisted:
Reader = implementer(IReadDescriptor)(Reader)
class Writer(object):
def __init__(self, fd, callback):
self._fd = fd
self._callback = callback
def logPrefix(self):
return "Writer"
def close(self):
self._fd.close()
def fileno(self):
return self._fd.fileno()
def connectionLost(self, reason):
self.close()
def doWrite(self):
self._callback(self._fd)
if have_twisted:
Writer = implementer(IWriteDescriptor)(Writer)
@skipIfNoTwisted
class ReactorReaderWriterTest(ReactorTestCase):
def _set_nonblocking(self, fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
def setUp(self):
super(ReactorReaderWriterTest, self).setUp()
r, w = os.pipe()
self._set_nonblocking(r)
self._set_nonblocking(w)
set_close_exec(r)
set_close_exec(w)
self._p1 = os.fdopen(r, "rb", 0)
self._p2 = os.fdopen(w, "wb", 0)
def tearDown(self):
super(ReactorReaderWriterTest, self).tearDown()
self._p1.close()
self._p2.close()
def _testReadWrite(self):
"""
In this test the writer writes an 'x' to its fd. The reader
        reads it, checks the value, and ends the test.
"""
self.shouldWrite = True
def checkReadInput(fd):
self.assertEquals(fd.read(1), b'x')
self._reactor.stop()
def writeOnce(fd):
if self.shouldWrite:
self.shouldWrite = False
fd.write(b'x')
self._reader = Reader(self._p1, checkReadInput)
self._writer = Writer(self._p2, writeOnce)
self._reactor.addWriter(self._writer)
# Test that adding the reader twice adds it only once to
# IOLoop.
self._reactor.addReader(self._reader)
self._reactor.addReader(self._reader)
def testReadWrite(self):
self._reactor.callWhenRunning(self._testReadWrite)
self._reactor.run()
def _testNoWriter(self):
"""
In this test we have no writer. Make sure the reader doesn't
read anything.
"""
def checkReadInput(fd):
self.fail("Must not be called.")
def stopTest():
# Close the writer here since the IOLoop doesn't know
# about it.
self._writer.close()
self._reactor.stop()
self._reader = Reader(self._p1, checkReadInput)
# We create a writer, but it should never be invoked.
self._writer = Writer(self._p2, lambda fd: fd.write('x'))
# Test that adding and removing the writer leaves us with no writer.
self._reactor.addWriter(self._writer)
self._reactor.removeWriter(self._writer)
# Test that adding and removing the reader doesn't cause
# unintended effects.
self._reactor.addReader(self._reader)
# Wake up after a moment and stop the test
self._reactor.callLater(0.001, stopTest)
def testNoWriter(self):
self._reactor.callWhenRunning(self._testNoWriter)
self._reactor.run()
# Test various combinations of twisted and tornado http servers,
# http clients, and event loop interfaces.
@skipIfNoTwisted
@unittest.skipIf(not have_twisted_web, 'twisted web not present')
class CompatibilityTests(unittest.TestCase):
def setUp(self):
self.saved_signals = save_signal_handlers()
self.io_loop = IOLoop()
self.io_loop.make_current()
self.reactor = TornadoReactor(self.io_loop)
def tearDown(self):
self.reactor.disconnectAll()
self.io_loop.clear_current()
self.io_loop.close(all_fds=True)
restore_signal_handlers(self.saved_signals)
def start_twisted_server(self):
class HelloResource(Resource):
isLeaf = True
def render_GET(self, request):
return "Hello from twisted!"
site = Site(HelloResource())
port = self.reactor.listenTCP(0, site, interface='127.0.0.1')
self.twisted_port = port.getHost().port
def start_tornado_server(self):
class HelloHandler(RequestHandler):
def get(self):
self.write("Hello from tornado!")
app = Application([('/', HelloHandler)],
log_function=lambda x: None)
server = HTTPServer(app, io_loop=self.io_loop)
sock, self.tornado_port = bind_unused_port()
server.add_sockets([sock])
def run_ioloop(self):
self.stop_loop = self.io_loop.stop
self.io_loop.start()
self.reactor.fireSystemEvent('shutdown')
def run_reactor(self):
self.stop_loop = self.reactor.stop
self.stop = self.reactor.stop
self.reactor.run()
def tornado_fetch(self, url, runner):
responses = []
client = AsyncHTTPClient(self.io_loop)
def callback(response):
responses.append(response)
self.stop_loop()
client.fetch(url, callback=callback)
runner()
self.assertEqual(len(responses), 1)
responses[0].rethrow()
return responses[0]
def twisted_fetch(self, url, runner):
# http://twistedmatrix.com/documents/current/web/howto/client.html
chunks = []
client = Agent(self.reactor)
d = client.request(b'GET', utf8(url))
class Accumulator(Protocol):
def __init__(self, finished):
self.finished = finished
def dataReceived(self, data):
chunks.append(data)
def connectionLost(self, reason):
self.finished.callback(None)
def callback(response):
finished = Deferred()
response.deliverBody(Accumulator(finished))
return finished
d.addCallback(callback)
def shutdown(failure):
if hasattr(self, 'stop_loop'):
self.stop_loop()
elif failure is not None:
# loop hasn't been initialized yet; try our best to
# get an error message out. (the runner() interaction
# should probably be refactored).
try:
failure.raiseException()
except:
logging.error('exception before starting loop', exc_info=True)
d.addBoth(shutdown)
runner()
self.assertTrue(chunks)
return ''.join(chunks)
def twisted_coroutine_fetch(self, url, runner):
body = [None]
@gen.coroutine
def f():
# This is simpler than the non-coroutine version, but it cheats
# by reading the body in one blob instead of streaming it with
# a Protocol.
client = Agent(self.reactor)
response = yield client.request(b'GET', utf8(url))
with warnings.catch_warnings():
# readBody has a buggy DeprecationWarning in Twisted 15.0:
# https://twistedmatrix.com/trac/changeset/43379
warnings.simplefilter('ignore', category=DeprecationWarning)
body[0] = yield readBody(response)
self.stop_loop()
self.io_loop.add_callback(f)
runner()
return body[0]
def testTwistedServerTornadoClientIOLoop(self):
self.start_twisted_server()
response = self.tornado_fetch(
'http://127.0.0.1:%d' % self.twisted_port, self.run_ioloop)
self.assertEqual(response.body, 'Hello from twisted!')
def testTwistedServerTornadoClientReactor(self):
self.start_twisted_server()
response = self.tornado_fetch(
'http://127.0.0.1:%d' % self.twisted_port, self.run_reactor)
self.assertEqual(response.body, 'Hello from twisted!')
def testTornadoServerTwistedClientIOLoop(self):
self.start_tornado_server()
response = self.twisted_fetch(
'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop)
self.assertEqual(response, 'Hello from tornado!')
def testTornadoServerTwistedClientReactor(self):
self.start_tornado_server()
response = self.twisted_fetch(
'http://127.0.0.1:%d' % self.tornado_port, self.run_reactor)
self.assertEqual(response, 'Hello from tornado!')
@skipIfNoSingleDispatch
def testTornadoServerTwistedCoroutineClientIOLoop(self):
self.start_tornado_server()
response = self.twisted_coroutine_fetch(
'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop)
self.assertEqual(response, 'Hello from tornado!')
@skipIfNoTwisted
@skipIfNoSingleDispatch
class ConvertDeferredTest(unittest.TestCase):
def test_success(self):
@inlineCallbacks
def fn():
if False:
# inlineCallbacks doesn't work with regular functions;
# must have a yield even if it's unreachable.
yield
returnValue(42)
f = gen.convert_yielded(fn())
self.assertEqual(f.result(), 42)
def test_failure(self):
@inlineCallbacks
def fn():
if False:
yield
1 / 0
f = gen.convert_yielded(fn())
with self.assertRaises(ZeroDivisionError):
f.result()
if have_twisted:
# Import and run as much of twisted's test suite as possible.
# This is unfortunately rather dependent on implementation details,
# but there doesn't appear to be a clean all-in-one conformance test
# suite for reactors.
#
# This is a list of all test suites using the ReactorBuilder
# available in Twisted 11.0.0 and 11.1.0 (and a blacklist of
# specific test methods to be disabled).
twisted_tests = {
'twisted.internet.test.test_core.ObjectModelIntegrationTest': [],
'twisted.internet.test.test_core.SystemEventTestsBuilder': [
'test_iterate', # deliberately not supported
# Fails on TwistedIOLoop and AsyncIOLoop.
'test_runAfterCrash',
],
'twisted.internet.test.test_fdset.ReactorFDSetTestsBuilder': [
"test_lostFileDescriptor", # incompatible with epoll and kqueue
],
'twisted.internet.test.test_process.ProcessTestsBuilder': [
# Only work as root. Twisted's "skip" functionality works
# with py27+, but not unittest2 on py26.
'test_changeGID',
'test_changeUID',
],
# Process tests appear to work on OSX 10.7, but not 10.6
#'twisted.internet.test.test_process.PTYProcessTestsBuilder': [
# 'test_systemCallUninterruptedByChildExit',
# ],
'twisted.internet.test.test_tcp.TCPClientTestsBuilder': [
'test_badContext', # ssl-related; see also SSLClientTestsMixin
],
'twisted.internet.test.test_tcp.TCPPortTestsBuilder': [
# These use link-local addresses and cause firewall prompts on mac
'test_buildProtocolIPv6AddressScopeID',
'test_portGetHostOnIPv6ScopeID',
'test_serverGetHostOnIPv6ScopeID',
'test_serverGetPeerOnIPv6ScopeID',
],
'twisted.internet.test.test_tcp.TCPConnectionTestsBuilder': [],
'twisted.internet.test.test_tcp.WriteSequenceTests': [],
'twisted.internet.test.test_tcp.AbortConnectionTestCase': [],
'twisted.internet.test.test_threads.ThreadTestsBuilder': [],
'twisted.internet.test.test_time.TimeTestsBuilder': [],
# Extra third-party dependencies (pyOpenSSL)
#'twisted.internet.test.test_tls.SSLClientTestsMixin': [],
'twisted.internet.test.test_udp.UDPServerTestsBuilder': [],
'twisted.internet.test.test_unix.UNIXTestsBuilder': [
# Platform-specific. These tests would be skipped automatically
# if we were running twisted's own test runner.
'test_connectToLinuxAbstractNamespace',
'test_listenOnLinuxAbstractNamespace',
# These tests use twisted's sendmsg.c extension and sometimes
# fail with what looks like uninitialized memory errors
# (more common on pypy than cpython, but I've seen it on both)
'test_sendFileDescriptor',
'test_sendFileDescriptorTriggersPauseProducing',
'test_descriptorDeliveredBeforeBytes',
'test_avoidLeakingFileDescriptors',
],
'twisted.internet.test.test_unix.UNIXDatagramTestsBuilder': [
'test_listenOnLinuxAbstractNamespace',
],
'twisted.internet.test.test_unix.UNIXPortTestsBuilder': [],
}
for test_name, blacklist in twisted_tests.items():
try:
test_class = import_object(test_name)
except (ImportError, AttributeError):
continue
for test_func in blacklist:
if hasattr(test_class, test_func):
# The test_func may be defined in a mixin, so clobber
# it instead of delattr()
setattr(test_class, test_func, lambda self: None)
def make_test_subclass(test_class):
class TornadoTest(test_class):
_reactors = ["tornado.platform.twisted._TestReactor"]
def setUp(self):
# Twisted's tests expect to be run from a temporary
# directory; they create files in their working directory
# and don't always clean up after themselves.
self.__curdir = os.getcwd()
self.__tempdir = tempfile.mkdtemp()
os.chdir(self.__tempdir)
super(TornadoTest, self).setUp()
def tearDown(self):
super(TornadoTest, self).tearDown()
os.chdir(self.__curdir)
shutil.rmtree(self.__tempdir)
def buildReactor(self):
self.__saved_signals = save_signal_handlers()
return test_class.buildReactor(self)
def unbuildReactor(self, reactor):
test_class.unbuildReactor(self, reactor)
# Clean up file descriptors (especially epoll/kqueue
# objects) eagerly instead of leaving them for the
# GC. Unfortunately we can't do this in reactor.stop
# since twisted expects to be able to unregister
# connections in a post-shutdown hook.
reactor._io_loop.close(all_fds=True)
restore_signal_handlers(self.__saved_signals)
TornadoTest.__name__ = test_class.__name__
return TornadoTest
test_subclass = make_test_subclass(test_class)
globals().update(test_subclass.makeTestCaseClasses())
# Since we're not using twisted's test runner, it's tricky to get
# logging set up well. Most of the time it's easiest to just
# leave it turned off, but while working on these tests you may want
# to uncomment one of the other lines instead.
log.defaultObserver.stop()
# import sys; log.startLogging(sys.stderr, setStdout=0)
# log.startLoggingWithObserver(log.PythonLoggingObserver().emit, setStdout=0)
# import logging; logging.getLogger('twisted').setLevel(logging.WARNING)
if have_twisted:
class LayeredTwistedIOLoop(TwistedIOLoop):
"""Layers a TwistedIOLoop on top of a TornadoReactor on a SelectIOLoop.
This is of course silly, but is useful for testing purposes to make
sure we're implementing both sides of the various interfaces
correctly. In some tests another TornadoReactor is layered on top
of the whole stack.
"""
def initialize(self):
# When configured to use LayeredTwistedIOLoop we can't easily
# get the next-best IOLoop implementation, so use the lowest common
# denominator.
self.real_io_loop = SelectIOLoop()
reactor = TornadoReactor(io_loop=self.real_io_loop)
super(LayeredTwistedIOLoop, self).initialize(reactor=reactor)
self.add_callback(self.make_current)
def close(self, all_fds=False):
super(LayeredTwistedIOLoop, self).close(all_fds=all_fds)
# HACK: This is the same thing that test_class.unbuildReactor does.
for reader in self.reactor._internalReaders:
self.reactor.removeReader(reader)
reader.connectionLost(None)
self.real_io_loop.close(all_fds=all_fds)
def stop(self):
# One of twisted's tests fails if I don't delay crash()
# until the reactor has started, but if I move this to
# TwistedIOLoop then the tests fail when I'm *not* running
# tornado-on-twisted-on-tornado. I'm clearly missing something
# about the startup/crash semantics, but since stop and crash
# are really only used in tests it doesn't really matter.
self.reactor.callWhenRunning(self.reactor.crash)
if __name__ == "__main__":
unittest.main()
|
gpl-3.0
|
emawind84/readthedocs.org
|
readthedocs/projects/feeds.py
|
26
|
1035
|
"""Project RSS feeds"""
from django.contrib.syndication.views import Feed
from readthedocs.projects.models import Project
class LatestProjectsFeed(Feed):
"""RSS feed for projects that were recently updated"""
title = "Recently updated documentation"
link = "http://readthedocs.org"
description = "Recently updated documentation on Read the Docs"
def items(self):
return Project.objects.public().order_by('-modified_date')[:10]
def item_title(self, item):
return item.name
def item_description(self, item):
return item.get_latest_build()
class NewProjectsFeed(Feed):
"""RSS feed for newly created projects"""
title = "Newest documentation"
link = "http://readthedocs.org"
description = "Recently created documentation on Read the Docs"
def items(self):
return Project.objects.public().order_by('-pk')[:10]
def item_title(self, item):
return item.name
def item_description(self, item):
return item.get_latest_build()
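# Illustrative wiring (a sketch, not part of this module): these Feed classes
# would typically be exposed through Django URL patterns, for example:
#
#   from django.conf.urls import url
#   from readthedocs.projects.feeds import LatestProjectsFeed, NewProjectsFeed
#
#   urlpatterns = [
#       url(r'^feeds/new/$', NewProjectsFeed(), name='projects_feed_new'),
#       url(r'^feeds/latest/$', LatestProjectsFeed(), name='projects_feed_latest'),
#   ]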
|
mit
|
dreamsxin/kbengine
|
kbe/src/lib/python/Lib/hashlib.py
|
82
|
7983
|
#. Copyright (C) 2005-2010 Gregory P. Smith ([email protected])
# Licensed to PSF under a Contributor Agreement.
#
__doc__ = """hashlib module - A common interface to many hash functions.
new(name, data=b'') - returns a new hash object implementing the
given hash function; initializing the hash
using the given binary data.
Named constructor functions are also available, these are faster
than using new(name):
md5(), sha1(), sha224(), sha256(), sha384(), and sha512()
More algorithms may be available on your platform but the above are guaranteed
to exist. See the algorithms_guaranteed and algorithms_available attributes
to find out what algorithm names can be passed to new().
NOTE: If you want the adler32 or crc32 hash functions they are available in
the zlib module.
Choose your hash function wisely. Some have known collision weaknesses.
sha384 and sha512 will be slow on 32 bit platforms.
Hash objects have these methods:
- update(arg): Update the hash object with the bytes in arg. Repeated calls
are equivalent to a single call with the concatenation of all
the arguments.
- digest(): Return the digest of the bytes passed to the update() method
so far.
- hexdigest(): Like digest() except the digest is returned as a unicode
object of double length, containing only hexadecimal digits.
- copy(): Return a copy (clone) of the hash object. This can be used to
efficiently compute the digests of strings that share a common
initial substring.
For example, to obtain the digest of the string 'Nobody inspects the
spammish repetition':
>>> import hashlib
>>> m = hashlib.md5()
>>> m.update(b"Nobody inspects")
>>> m.update(b" the spammish repetition")
>>> m.digest()
b'\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9'
More condensed:
>>> hashlib.sha224(b"Nobody inspects the spammish repetition").hexdigest()
'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2'
"""
# This tuple and __get_builtin_constructor() must be modified if a new
# always available algorithm is added.
__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
algorithms_guaranteed = set(__always_supported)
algorithms_available = set(__always_supported)
__all__ = __always_supported + ('new', 'algorithms_guaranteed',
'algorithms_available', 'pbkdf2_hmac')
__builtin_constructor_cache = {}
def __get_builtin_constructor(name):
cache = __builtin_constructor_cache
constructor = cache.get(name)
if constructor is not None:
return constructor
try:
if name in ('SHA1', 'sha1'):
import _sha1
cache['SHA1'] = cache['sha1'] = _sha1.sha1
elif name in ('MD5', 'md5'):
import _md5
cache['MD5'] = cache['md5'] = _md5.md5
elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'):
import _sha256
cache['SHA224'] = cache['sha224'] = _sha256.sha224
cache['SHA256'] = cache['sha256'] = _sha256.sha256
elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'):
import _sha512
cache['SHA384'] = cache['sha384'] = _sha512.sha384
cache['SHA512'] = cache['sha512'] = _sha512.sha512
except ImportError:
pass # no extension module, this hash is unsupported.
constructor = cache.get(name)
if constructor is not None:
return constructor
raise ValueError('unsupported hash type ' + name)
def __get_openssl_constructor(name):
try:
f = getattr(_hashlib, 'openssl_' + name)
# Allow the C module to raise ValueError. The function will be
# defined but the hash not actually available thanks to OpenSSL.
f()
# Use the C function directly (very fast)
return f
except (AttributeError, ValueError):
return __get_builtin_constructor(name)
def __py_new(name, data=b''):
"""new(name, data=b'') - Return a new hashing object using the named algorithm;
optionally initialized with data (which must be bytes).
"""
return __get_builtin_constructor(name)(data)
def __hash_new(name, data=b''):
"""new(name, data=b'') - Return a new hashing object using the named algorithm;
optionally initialized with data (which must be bytes).
"""
try:
return _hashlib.new(name, data)
except ValueError:
# If the _hashlib module (OpenSSL) doesn't support the named
# hash, try using our builtin implementations.
# This allows for SHA224/256 and SHA384/512 support even though
# the OpenSSL library prior to 0.9.8 doesn't provide them.
return __get_builtin_constructor(name)(data)
try:
import _hashlib
new = __hash_new
__get_hash = __get_openssl_constructor
algorithms_available = algorithms_available.union(
_hashlib.openssl_md_meth_names)
except ImportError:
new = __py_new
__get_hash = __get_builtin_constructor
try:
# OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA
from _hashlib import pbkdf2_hmac
except ImportError:
_trans_5C = bytes((x ^ 0x5C) for x in range(256))
_trans_36 = bytes((x ^ 0x36) for x in range(256))
def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None):
"""Password based key derivation function 2 (PKCS #5 v2.0)
        This Python implementation, based on the hmac module, is about as fast
        as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much faster
        for long passwords.
"""
if not isinstance(hash_name, str):
raise TypeError(hash_name)
if not isinstance(password, (bytes, bytearray)):
password = bytes(memoryview(password))
if not isinstance(salt, (bytes, bytearray)):
salt = bytes(memoryview(salt))
# Fast inline HMAC implementation
inner = new(hash_name)
outer = new(hash_name)
blocksize = getattr(inner, 'block_size', 64)
if len(password) > blocksize:
password = new(hash_name, password).digest()
password = password + b'\x00' * (blocksize - len(password))
inner.update(password.translate(_trans_36))
outer.update(password.translate(_trans_5C))
def prf(msg, inner=inner, outer=outer):
# PBKDF2_HMAC uses the password as key. We can re-use the same
            # digest objects and just update copies to skip initialization.
icpy = inner.copy()
ocpy = outer.copy()
icpy.update(msg)
ocpy.update(icpy.digest())
return ocpy.digest()
if iterations < 1:
raise ValueError(iterations)
if dklen is None:
dklen = outer.digest_size
if dklen < 1:
raise ValueError(dklen)
dkey = b''
loop = 1
from_bytes = int.from_bytes
while len(dkey) < dklen:
prev = prf(salt + loop.to_bytes(4, 'big'))
            # endianness doesn't matter here as long as to / from use the same
rkey = int.from_bytes(prev, 'big')
for i in range(iterations - 1):
prev = prf(prev)
# rkey = rkey ^ prev
rkey ^= from_bytes(prev, 'big')
loop += 1
dkey += rkey.to_bytes(inner.digest_size, 'big')
return dkey[:dklen]
for __func_name in __always_supported:
# try them all, some may not work due to the OpenSSL
# version not supporting that algorithm.
try:
globals()[__func_name] = __get_hash(__func_name)
except ValueError:
import logging
logging.exception('code for hash %s was not found.', __func_name)
# Cleanup locals()
del __always_supported, __func_name, __get_hash
del __py_new, __hash_new, __get_openssl_constructor
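# --- Editor's illustrative sketch, not part of the stdlib module above ---
# A minimal demonstration of the public names this file defines: the named
# constructors, new(), pbkdf2_hmac(), and algorithms_available. The password,
# salt, and iteration count below are arbitrary example values.
if __name__ == '__main__':
    import binascii
    # Named constructor and generic new() produce the same digest.
    print(sha256(b'Nobody inspects the spammish repetition').hexdigest())
    print(new('sha256', b'Nobody inspects the spammish repetition').hexdigest())
    # Derive a 32-byte key with PBKDF2-HMAC-SHA256.
    dk = pbkdf2_hmac('sha256', b'example password', b'example salt', 100000, dklen=32)
    print(binascii.hexlify(dk).decode('ascii'))
    # Hash algorithms usable with new() on this interpreter / OpenSSL build.
    print(sorted(algorithms_available))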
|
lgpl-3.0
|
sunpeak/MITMf
|
core/packetfilter.py
|
4
|
1221
|
import threading
import logging
from core.utils import set_ip_forwarding, iptables
from core.logger import logger
from scapy.all import *
from traceback import print_exc
from netfilterqueue import NetfilterQueue
formatter = logging.Formatter("%(asctime)s [PacketFilter] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
log = logger().setup_logger("PacketFilter", formatter)
class PacketFilter:
def __init__(self, filter):
self.filter = filter
def start(self):
set_ip_forwarding(1)
iptables().NFQUEUE()
self.nfqueue = NetfilterQueue()
self.nfqueue.bind(1, self.modify)
t = threading.Thread(name='packetparser', target=self.nfqueue.run)
t.setDaemon(True)
t.start()
def modify(self, pkt):
#log.debug("Got packet")
data = pkt.get_payload()
packet = IP(data)
try:
execfile(self.filter)
except Exception:
log.debug("Error occurred in filter")
print_exc()
pkt.set_payload(str(packet)) #set the packet content to our modified version
pkt.accept() #accept the packet
def stop(self):
self.nfqueue.unbind()
set_ip_forwarding(0)
iptables().flush()
|
gpl-3.0
|
mgit-at/ansible
|
lib/ansible/plugins/terminal/asa.py
|
42
|
2417
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_text, to_bytes
from ansible.plugins.terminal import TerminalBase
class TerminalModule(TerminalBase):
terminal_stdout_re = [
re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$")
]
terminal_stderr_re = [
re.compile(br"error:", re.I),
re.compile(br"Removing.* not allowed, it is being used")
]
def on_open_shell(self):
if self._get_prompt().strip().endswith(b'#'):
self.disable_pager()
def disable_pager(self):
cmd = {u'command': u'no terminal pager'}
try:
self._exec_cli_command(u'no terminal pager')
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to disable terminal pager')
def on_become(self, passwd=None):
if self._get_prompt().strip().endswith(b'#'):
return
cmd = {u'command': u'enable'}
if passwd:
# Note: python-3.5 cannot combine u"" and r"" together. Thus make
# an r string and use to_text to ensure it's text on both py2 and py3.
cmd[u'prompt'] = to_text(r"[\r\n]?[Pp]assword: $", errors='surrogate_or_strict')
cmd[u'answer'] = passwd
try:
self._exec_cli_command(to_bytes(json.dumps(cmd), errors='surrogate_or_strict'))
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to elevate privilege to enable mode')
self.disable_pager()
|
gpl-3.0
|
rwakulszowa/servo
|
tests/wpt/web-platform-tests/service-workers/tools/blink-import.py
|
88
|
6816
|
import os
import re
import shutil
import glob
import tempfile
import sys
from collections import defaultdict
here = os.path.abspath(os.path.split(__file__)[0])
def get_extra_files(chromium_root):
return [(os.path.join(chromium_root, "LayoutTests", "http", "tests", "resources", "testharness-helpers.js"),
os.path.join("resources", "testharness-helpers.js"))]
resources_re = re.compile("/?(?:\.\./)*resources/(testharness(?:report)?)\.js")
def resources_path(line, depth):
return False, resources_re.sub(r"/resources/\1.js", line)
php_re = re.compile("\.php")
def php_to_python(line, depth):
return False, php_re.sub(".py", line)
abs_testharness_helpers_re = re.compile("([\"'])/resources/testharness-helpers.js")
testharness_helpers_re = re.compile("\.\./((?:\.\./)*)resources/testharness-helpers.js")
def testharness_helpers(line, depth):
if abs_testharness_helpers_re.findall(line):
return False, abs_testharness_helpers_re.sub(r"\1%sresources/testharness-helpers.js" % ("../" * (depth - 1)), line)
return False, testharness_helpers_re.sub(r"\1resources/testharness-helpers.js", line)
serviceworker_path_re = re.compile("/serviceworker/")
def service_worker_path(line, depth):
return False, serviceworker_path_re.sub("/service-workers/", line)
localhost_re = re.compile("localhost")
alt_host_re = re.compile("127\.0\.0\.1")
port_http_re = re.compile("8000")
port_https_re = re.compile("8000")
def server_names(line, depth):
line, count_0 = localhost_re.subn("{{host}}", line)
line, count_1 = alt_host_re.subn("{{domains[www]}}", line)
line, count_2 = port_http_re.subn("{{ports[http][0]}}", line)
line, count_3 = port_https_re.subn("{{ports[https][0]}}", line)
count = count_0 + count_1 + count_2 + count_3
return bool(count), line
def source_paths(chromium_root):
for dirpath, dirnames, filenames in os.walk(chromium_root):
if "chromium" in dirnames:
dirnames.remove("chromium")
for filename in filenames:
if filename.endswith("-expected.txt") or filename.endswith(".php"):
continue
yield os.path.relpath(os.path.join(dirpath, filename), chromium_root)
def do_subs(path, line):
    depth = len(path.split(os.path.sep))
    subs = [resources_path, php_to_python, testharness_helpers, service_worker_path, server_names]
file_is_template = False
for sub in subs:
added_template, line = sub(line, depth)
if added_template:
file_is_template = True
return file_is_template, line
def get_head(git):
return git("rev-parse", "HEAD")
def get_changes(git, path, old, new):
data = git("diff", "--name-status", "-z", "--no-renames", "%s..%s" % (old, new), "--", path)
items = data.split("\0")
rv = defaultdict(list)
for status, path in items:
rv[status].append(path)
return rv
def copy(src_path, out_dir, rel_path):
dest = os.path.normpath(os.path.join(out_dir, rel_path))
dest_dir = os.path.split(dest)[0]
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
shutil.copy2(src_path, dest)
def copy_local_files(local_files, out_root, tmp_dir):
for path in local_files:
rel_path = os.path.relpath(path, out_root)
copy(path, tmp_dir, rel_path)
def copy_extra_files(chromium_root, tmp_dir):
for in_path, rel_path in get_extra_files(chromium_root):
copy(in_path, tmp_dir, rel_path)
def sub_changed_filenames(filename_changes, f):
rv = []
for line in f:
for in_name, out_name in filename_changes.iteritems():
line = line.replace(in_name, out_name)
rv.append(line)
return "".join(rv)
testharness_re = re.compile("<script[^>]*src=[\"']?/resources/testharness.js[\"' ][^>]*>")
def is_top_level_test(path, data):
if os.path.splitext(path)[1] != ".html":
return False
for line in data:
if testharness_re.findall(line):
return True
return False
def add_suffix(path, suffix):
root, ext = os.path.splitext(path)
return root + ".%s" % suffix + ext
def main():
if "--cache-tests" in sys.argv:
sw_path = os.path.join("LayoutTests", "http", "tests", "cachestorage")
out_root = os.path.abspath(os.path.join(here, "..", "cache-storage"))
elif "--sw-tests" in sys.argv:
sw_path = os.path.join("LayoutTests", "http", "tests", "serviceworkers")
out_root = os.path.abspath(os.path.join(here, "..", "service-worker"))
else:
raise ValueError("Must supply either --cache-tests or --sw-tests")
chromium_root = os.path.abspath(sys.argv[1])
work_path = tempfile.mkdtemp()
test_path = os.path.join(chromium_root, sw_path)
local_files = glob.glob(os.path.normpath(os.path.join(here, "..", "resources", "*.py")))
if not os.path.exists(out_root):
os.mkdir(out_root)
copy_local_files(local_files, out_root, work_path)
copy_extra_files(chromium_root, work_path)
path_changes = {}
for path in source_paths(test_path):
out_path = os.path.join(work_path, path)
out_dir = os.path.dirname(out_path)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
with open(os.path.join(test_path, path), "r") as in_f:
data = []
sub = False
for line in in_f:
sub_flag, output_line = do_subs(path, line)
data.append(output_line)
if sub_flag:
sub = True
is_test = is_top_level_test(out_path, data)
initial_path = out_path
if is_test:
path_1 = add_suffix(out_path, "https")
else:
path_1 = out_path
if sub:
path_2 = add_suffix(out_path, "sub")
else:
path_2 = path_1
if path_2 != initial_path:
path_changes[initial_path] = path_2
with open(path_2, "w") as out_f:
out_f.write("".join(data))
filename_changes = {}
for k, v in path_changes.iteritems():
if os.path.basename(k) in filename_changes:
print "Got duplicate name:" + os.path.basename(k)
filename_changes[os.path.basename(k)] = os.path.basename(v)
for path in source_paths(work_path):
full_path = os.path.join(work_path, path)
with open(full_path, "r") as f:
data = sub_changed_filenames(filename_changes, f)
with open(full_path, "w") as f:
f.write(data)
for dirpath, dirnames, filenames in os.walk(work_path):
for filename in filenames:
in_path = os.path.join(dirpath, filename)
rel_path = os.path.relpath(in_path, work_path)
copy(in_path, out_root, rel_path)
if __name__ == "__main__":
main()
|
mpl-2.0
|
keras-team/keras-io
|
examples/nlp/text_classification_from_scratch.py
|
1
|
8972
|
"""
Title: Text classification from scratch
Authors: Mark Omernick, Francois Chollet
Date created: 2019/11/06
Last modified: 2020/05/17
Description: Text sentiment classification starting from raw text files.
"""
"""
## Introduction
This example shows how to do text classification starting from raw text (as
a set of text files on disk). We demonstrate the workflow on the IMDB sentiment
classification dataset (unprocessed version). We use the `TextVectorization` layer for
word splitting & indexing.
"""
"""
## Setup
"""
import tensorflow as tf
import numpy as np
"""
## Load the data: IMDB movie review sentiment classification
Let's download the data and inspect its structure.
"""
"""shell
curl -O https://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz
tar -xf aclImdb_v1.tar.gz
"""
"""
The `aclImdb` folder contains a `train` and `test` subfolder:
"""
"""shell
ls aclImdb
"""
"""shell
ls aclImdb/test
"""
"""shell
ls aclImdb/train
"""
"""
The `aclImdb/train/pos` and `aclImdb/train/neg` folders contain text files, each of
which represents one review (either positive or negative):
"""
"""shell
cat aclImdb/train/pos/6248_7.txt
"""
"""
We are only interested in the `pos` and `neg` subfolders, so let's delete the rest:
"""
"""shell
rm -r aclImdb/train/unsup
"""
"""
You can use the utility `tf.keras.preprocessing.text_dataset_from_directory` to
generate a labeled `tf.data.Dataset` object from a set of text files on disk filed
into class-specific folders.
Let's use it to generate the training, validation, and test datasets. The validation
and training datasets are generated from two subsets of the `train` directory, with 20%
of samples going to the validation dataset and 80% going to the training dataset.
Having a validation dataset in addition to the test dataset is useful for tuning
hyperparameters, such as the model architecture, for which the test dataset should not
be used.
Before putting the model out into the real world, however, it should be retrained using all
available training data (without creating a validation dataset), so its performance is maximized.
When using the `validation_split` & `subset` arguments, make sure to either specify a
random seed, or to pass `shuffle=False`, so that the validation & training splits you
get have no overlap.
"""
batch_size = 32
raw_train_ds = tf.keras.preprocessing.text_dataset_from_directory(
"aclImdb/train",
batch_size=batch_size,
validation_split=0.2,
subset="training",
seed=1337,
)
raw_val_ds = tf.keras.preprocessing.text_dataset_from_directory(
"aclImdb/train",
batch_size=batch_size,
validation_split=0.2,
subset="validation",
seed=1337,
)
raw_test_ds = tf.keras.preprocessing.text_dataset_from_directory(
"aclImdb/test", batch_size=batch_size
)
print(
"Number of batches in raw_train_ds: %d"
% tf.data.experimental.cardinality(raw_train_ds)
)
print(
"Number of batches in raw_val_ds: %d" % tf.data.experimental.cardinality(raw_val_ds)
)
print(
"Number of batches in raw_test_ds: %d"
% tf.data.experimental.cardinality(raw_test_ds)
)
"""
Let's preview a few samples:
"""
# It's important to take a look at your raw data to ensure your normalization
# and tokenization will work as expected. We can do that by taking a few
# examples from the training set and looking at them.
# This is one of the places where eager execution shines:
# we can just evaluate these tensors using .numpy()
# instead of needing to evaluate them in a Session/Graph context.
for text_batch, label_batch in raw_train_ds.take(1):
for i in range(5):
print(text_batch.numpy()[i])
print(label_batch.numpy()[i])
"""
## Prepare the data
In particular, we remove `<br />` tags.
"""
from tensorflow.keras.layers.experimental.preprocessing import TextVectorization
import string
import re
# Having looked at our data above, we see that the raw text contains HTML break
# tags of the form '<br />'. These tags will not be removed by the default
# standardizer (which doesn't strip HTML). Because of this, we will need to
# create a custom standardization function.
def custom_standardization(input_data):
lowercase = tf.strings.lower(input_data)
stripped_html = tf.strings.regex_replace(lowercase, "<br />", " ")
return tf.strings.regex_replace(
stripped_html, "[%s]" % re.escape(string.punctuation), ""
)
# Model constants.
max_features = 20000
embedding_dim = 128
sequence_length = 500
# Now that we have our custom standardization, we can instantiate our text
# vectorization layer. We are using this layer to normalize, split, and map
# strings to integers, so we set our 'output_mode' to 'int'.
# Note that we're using the default split function,
# and the custom standardization defined above.
# We also set an explicit maximum sequence length, since the CNNs later in our
# model won't support ragged sequences.
vectorize_layer = TextVectorization(
standardize=custom_standardization,
max_tokens=max_features,
output_mode="int",
output_sequence_length=sequence_length,
)
# Now that the vocab layer has been created, call `adapt` on a text-only
# dataset to create the vocabulary. You don't have to batch, but for very large
# datasets this means you're not keeping spare copies of the dataset in memory.
# Let's make a text-only dataset (no labels):
text_ds = raw_train_ds.map(lambda x, y: x)
# Let's call `adapt`:
vectorize_layer.adapt(text_ds)
"""
## Two options to vectorize the data
There are 2 ways we can use our text vectorization layer:
**Option 1: Make it part of the model**, so as to obtain a model that processes raw
strings, like this:
"""
"""
```python
text_input = tf.keras.Input(shape=(1,), dtype=tf.string, name='text')
x = vectorize_layer(text_input)
x = layers.Embedding(max_features + 1, embedding_dim)(x)
...
```
**Option 2: Apply it to the text dataset** to obtain a dataset of word indices, then
feed it into a model that expects integer sequences as inputs.
An important difference between the two is that option 2 enables you to do
**asynchronous CPU processing and buffering** of your data when training on GPU.
So if you're training the model on GPU, you probably want to go with this option to get
the best performance. This is what we will do below.
If we were to export our model to production, we'd ship a model that accepts raw
strings as input, like in the code snippet for option 1 above. This can be done after
training. We do this in the last section.
"""
def vectorize_text(text, label):
text = tf.expand_dims(text, -1)
return vectorize_layer(text), label
# Vectorize the data.
train_ds = raw_train_ds.map(vectorize_text)
val_ds = raw_val_ds.map(vectorize_text)
test_ds = raw_test_ds.map(vectorize_text)
# Do async prefetching / buffering of the data for best performance on GPU.
train_ds = train_ds.cache().prefetch(buffer_size=10)
val_ds = val_ds.cache().prefetch(buffer_size=10)
test_ds = test_ds.cache().prefetch(buffer_size=10)
"""
## Build a model
We choose a simple 1D convnet starting with an `Embedding` layer.
"""
from tensorflow.keras import layers
# An integer input for vocab indices.
inputs = tf.keras.Input(shape=(None,), dtype="int64")
# Next, we add a layer to map those vocab indices into a space of dimensionality
# 'embedding_dim'.
x = layers.Embedding(max_features, embedding_dim)(inputs)
x = layers.Dropout(0.5)(x)
# Conv1D + global max pooling
x = layers.Conv1D(128, 7, padding="valid", activation="relu", strides=3)(x)
x = layers.Conv1D(128, 7, padding="valid", activation="relu", strides=3)(x)
x = layers.GlobalMaxPooling1D()(x)
# We add a vanilla hidden layer:
x = layers.Dense(128, activation="relu")(x)
x = layers.Dropout(0.5)(x)
# We project onto a single unit output layer, and squash it with a sigmoid:
predictions = layers.Dense(1, activation="sigmoid", name="predictions")(x)
model = tf.keras.Model(inputs, predictions)
# Compile the model with binary crossentropy loss and an adam optimizer.
model.compile(loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"])
"""
## Train the model
"""
epochs = 3
# Fit the model using the train and test datasets.
model.fit(train_ds, validation_data=val_ds, epochs=epochs)
"""
## Evaluate the model on the test set
"""
model.evaluate(test_ds)
"""
## Make an end-to-end model
If you want to obtain a model capable of processing raw strings, you can simply
create a new model (using the weights we just trained):
"""
# A string input
inputs = tf.keras.Input(shape=(1,), dtype="string")
# Turn strings into vocab indices
indices = vectorize_layer(inputs)
# Turn vocab indices into predictions
outputs = model(indices)
# Our end to end model
end_to_end_model = tf.keras.Model(inputs, outputs)
end_to_end_model.compile(
loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"]
)
# Test it with `raw_test_ds`, which yields raw strings
end_to_end_model.evaluate(raw_test_ds)
|
apache-2.0
|
yang-g/grpc
|
src/python/grpcio/support.py
|
7
|
3941
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import os.path
import shutil
import sys
import tempfile
from distutils import errors
import commands
C_PYTHON_DEV = """
#include <Python.h>
int main(int argc, char **argv) { return 0; }
"""
C_PYTHON_DEV_ERROR_MESSAGE = """
Could not find <Python.h>. This could mean the following:
* You're on Ubuntu and haven't run `apt-get install python-dev`.
* You're on RHEL/Fedora and haven't run `yum install python-devel` or
`dnf install python-devel` (make sure you also have redhat-rpm-config
installed)
* You're on Mac OS X and the usual Python framework was somehow corrupted
(check your environment variables or try re-installing?)
* You're on Windows and your Python installation was somehow corrupted
(check your environment variables or try re-installing?)
"""
C_CHECKS = {
C_PYTHON_DEV: C_PYTHON_DEV_ERROR_MESSAGE,
}
def _compile(compiler, source_string):
tempdir = tempfile.mkdtemp()
cpath = os.path.join(tempdir, 'a.c')
with open(cpath, 'w') as cfile:
cfile.write(source_string)
try:
compiler.compile([cpath])
except errors.CompileError as error:
return error
finally:
shutil.rmtree(tempdir)
def _expect_compile(compiler, source_string, error_message):
if _compile(compiler, source_string) is not None:
sys.stderr.write(error_message)
raise commands.CommandError(
"Diagnostics found a compilation environment issue:\n{}"
.format(error_message))
def diagnose_compile_error(build_ext, error):
"""Attempt to diagnose an error during compilation."""
for c_check, message in C_CHECKS.items():
_expect_compile(build_ext.compiler, c_check, message)
python_sources = [
source for source in build_ext.get_source_files()
if source.startswith('./src/python') and source.endswith('c')
]
for source in python_sources:
if not os.path.isfile(source):
raise commands.CommandError((
"Diagnostics found a missing Python extension source file:\n{}\n\n"
"This is usually because the Cython sources haven't been transpiled "
"into C yet and you're building from source.\n"
"Try setting the environment variable "
"`GRPC_PYTHON_BUILD_WITH_CYTHON=1` when invoking `setup.py` or "
"when using `pip`, e.g.:\n\n"
"pip install -rrequirements.txt\n"
"GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .").format(source))
def diagnose_attribute_error(build_ext, error):
if any('_needs_stub' in arg for arg in error.args):
raise commands.CommandError(
"We expect a missing `_needs_stub` attribute from older versions of "
"setuptools. Consider upgrading setuptools.")
_ERROR_DIAGNOSES = {
errors.CompileError: diagnose_compile_error,
AttributeError: diagnose_attribute_error
}
def diagnose_build_ext_error(build_ext, error, formatted):
diagnostic = _ERROR_DIAGNOSES.get(type(error))
if diagnostic is None:
raise commands.CommandError(
"\n\nWe could not diagnose your build failure. Please file an issue at "
"http://www.github.com/grpc/grpc with `[Python install]` in the title."
"\n\n{}".format(formatted))
else:
diagnostic(build_ext, error)
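# --- Editor's illustrative sketch, not part of this module ---
# diagnose_build_ext_error() is intended to be called from a build_ext-style
# command when compiling the extensions fails. The wrapper class below is an
# assumption shown only to illustrate the calling pattern, not the project's
# actual command code.
#
#   import traceback
#   from setuptools.command import build_ext as _build_ext
#   import support
#
#   class BuildExt(_build_ext.build_ext):
#       def build_extensions(self):
#           try:
#               _build_ext.build_ext.build_extensions(self)
#           except Exception as error:
#               formatted = traceback.format_exc()
#               support.diagnose_build_ext_error(self, error, formatted)
#               raise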
|
apache-2.0
|
aahnne04/omaha
|
plugins/update/generate_plugin_idls.py
|
67
|
3325
|
#!/usr/bin/python2.4
#
# Copyright 2007-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========================================================================
"""
Generates IDL file for the OneClick ActiveX control from the passed-in IDL
template. The input template is a complete IDL file in all but one respect;
It has one replaceable entry for the CLSID for GoopdateOneClickControl.
We generate a GUID using UUIDGEN.EXE, and write out an IDL with a new CLSID.
"""
import sys
import os
import getopt
import commands
def _GetStatusOutput(cmd):
"""Return (status, output) of executing cmd in a shell."""
if os.name == "nt":
pipe = os.popen(cmd + " 2>&1", 'r')
text = pipe.read()
sts = pipe.close()
if sts is None: sts = 0
if text[-1:] == '\n': text = text[:-1]
return sts, text
else:
return commands.getstatusoutput(cmd)
def _GenerateIDLText(idl_template):
(status, guid) = _GetStatusOutput("uuidgen.exe")
if status != 0:
raise SystemExit("Failed to get GUID: %s" % guid)
return idl_template % guid
def _GenerateIDLFile(idl_template_filename, idl_output_filename):
f_in = open(idl_template_filename, 'r')
idl_template = f_in.read()
f_in.close()
idl_output = _GenerateIDLText(idl_template)
f_out = open(idl_output_filename, 'w')
f_out.write("""
// ** AUTOGENERATED FILE. DO NOT HAND-EDIT **
""")
f_out.write(idl_output)
f_out.close()
def _Usage():
"""Prints out script usage information."""
print """
generate_oneclick_idl.py: Write out the given IDL file.
Usage:
generate_oneclick_idl.py [--help
| --idl_template_file filename
--idl_output_file filename]
Options:
--help Show this information.
--idl_output_file filename Path/name of output IDL filename.
--idl_template_file filename Path/name of input IDL template.
"""
def _Main():
"""Generates IDL file."""
# use getopt to parse the option and argument list; this may raise, but
# don't catch it
_ARGUMENT_LIST = ["help", "idl_template_file=", "idl_output_file="]
(opts, args) = getopt.getopt(sys.argv[1:], "", _ARGUMENT_LIST)
if not opts or ("--help", "") in opts:
_Usage()
sys.exit()
idl_template_filename = ""
idl_output_filename = ""
for (o, v) in opts:
if o == "--idl_template_file":
idl_template_filename = v
if o == "--idl_output_file":
idl_output_filename = v
# make sure we have work to do
if not idl_template_filename:
raise SystemExit("no idl_template_filename specified")
if not idl_output_filename:
raise SystemExit("no idl_output_filename specified")
_GenerateIDLFile(idl_template_filename, idl_output_filename)
sys.exit()
if __name__ == "__main__":
_Main()
|
apache-2.0
|
evilhero/mylar
|
lib/bs4/testing.py
|
22
|
30829
|
"""Helper classes for tests."""
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
__license__ = "MIT"
import pickle
import copy
import functools
import unittest
from unittest import TestCase
from bs4 import BeautifulSoup
from bs4.element import (
CharsetMetaAttributeValue,
Comment,
ContentMetaAttributeValue,
Doctype,
SoupStrainer,
)
from bs4.builder import HTMLParserTreeBuilder
default_builder = HTMLParserTreeBuilder
class SoupTest(unittest.TestCase):
@property
def default_builder(self):
return default_builder()
def soup(self, markup, **kwargs):
"""Build a Beautiful Soup object from markup."""
builder = kwargs.pop('builder', self.default_builder)
return BeautifulSoup(markup, builder=builder, **kwargs)
def document_for(self, markup):
"""Turn an HTML fragment into a document.
The details depend on the builder.
"""
return self.default_builder.test_fragment_to_document(markup)
def assertSoupEquals(self, to_parse, compare_parsed_to=None):
builder = self.default_builder
obj = BeautifulSoup(to_parse, builder=builder)
if compare_parsed_to is None:
compare_parsed_to = to_parse
self.assertEqual(obj.decode(), self.document_for(compare_parsed_to))
def assertConnectedness(self, element):
"""Ensure that next_element and previous_element are properly
set for all descendants of the given element.
"""
earlier = None
for e in element.descendants:
if earlier:
self.assertEqual(e, earlier.next_element)
self.assertEqual(earlier, e.previous_element)
earlier = e
class HTMLTreeBuilderSmokeTest(object):
"""A basic test of a treebuilder's competence.
Any HTML treebuilder, present or future, should be able to pass
these tests. With invalid markup, there's room for interpretation,
and different parsers can handle it differently. But with the
markup in these tests, there's not much room for interpretation.
"""
def test_empty_element_tags(self):
"""Verify that all HTML4 and HTML5 empty element (aka void element) tags
are handled correctly.
"""
for name in [
'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr',
'spacer', 'frame'
]:
soup = self.soup("")
new_tag = soup.new_tag(name)
self.assertEqual(True, new_tag.is_empty_element)
def test_pickle_and_unpickle_identity(self):
# Pickling a tree, then unpickling it, yields a tree identical
# to the original.
tree = self.soup("<a><b>foo</a>")
dumped = pickle.dumps(tree, 2)
loaded = pickle.loads(dumped)
self.assertEqual(loaded.__class__, BeautifulSoup)
self.assertEqual(loaded.decode(), tree.decode())
def assertDoctypeHandled(self, doctype_fragment):
"""Assert that a given doctype string is handled correctly."""
doctype_str, soup = self._document_with_doctype(doctype_fragment)
# Make sure a Doctype object was created.
doctype = soup.contents[0]
self.assertEqual(doctype.__class__, Doctype)
self.assertEqual(doctype, doctype_fragment)
self.assertEqual(str(soup)[:len(doctype_str)], doctype_str)
# Make sure that the doctype was correctly associated with the
# parse tree and that the rest of the document parsed.
self.assertEqual(soup.p.contents[0], 'foo')
def _document_with_doctype(self, doctype_fragment):
"""Generate and parse a document with the given doctype."""
doctype = '<!DOCTYPE %s>' % doctype_fragment
markup = doctype + '\n<p>foo</p>'
soup = self.soup(markup)
return doctype, soup
def test_normal_doctypes(self):
"""Make sure normal, everyday HTML doctypes are handled correctly."""
self.assertDoctypeHandled("html")
self.assertDoctypeHandled(
'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"')
def test_empty_doctype(self):
soup = self.soup("<!DOCTYPE>")
doctype = soup.contents[0]
self.assertEqual("", doctype.strip())
def test_public_doctype_with_url(self):
doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"'
self.assertDoctypeHandled(doctype)
def test_system_doctype(self):
self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"')
def test_namespaced_system_doctype(self):
# We can handle a namespaced doctype with a system ID.
self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"')
def test_namespaced_public_doctype(self):
# Test a namespaced doctype with a public id.
self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"')
def test_real_xhtml_document(self):
"""A real XHTML document should come out more or less the same as it went in."""
markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
soup = self.soup(markup)
self.assertEqual(
soup.encode("utf-8").replace(b"\n", b""),
markup.replace(b"\n", b""))
def test_processing_instruction(self):
# We test both Unicode and bytestring to verify that
# process_markup correctly sets processing_instruction_class
# even when the markup is already Unicode and there is no
# need to process anything.
markup = u"""<?PITarget PIContent?>"""
soup = self.soup(markup)
self.assertEqual(markup, soup.decode())
markup = b"""<?PITarget PIContent?>"""
soup = self.soup(markup)
self.assertEqual(markup, soup.encode("utf8"))
def test_deepcopy(self):
"""Make sure you can copy the tree builder.
This is important because the builder is part of a
BeautifulSoup object, and we want to be able to copy that.
"""
copy.deepcopy(self.default_builder)
def test_p_tag_is_never_empty_element(self):
"""A <p> tag is never designated as an empty-element tag.
Even if the markup shows it as an empty-element tag, it
shouldn't be presented that way.
"""
soup = self.soup("<p/>")
self.assertFalse(soup.p.is_empty_element)
self.assertEqual(str(soup.p), "<p></p>")
def test_unclosed_tags_get_closed(self):
"""A tag that's not closed by the end of the document should be closed.
This applies to all tags except empty-element tags.
"""
self.assertSoupEquals("<p>", "<p></p>")
self.assertSoupEquals("<b>", "<b></b>")
self.assertSoupEquals("<br>", "<br/>")
def test_br_is_always_empty_element_tag(self):
"""A <br> tag is designated as an empty-element tag.
Some parsers treat <br></br> as one <br/> tag, some parsers as
two tags, but it should always be an empty-element tag.
"""
soup = self.soup("<br></br>")
self.assertTrue(soup.br.is_empty_element)
self.assertEqual(str(soup.br), "<br/>")
def test_nested_formatting_elements(self):
self.assertSoupEquals("<em><em></em></em>")
def test_double_head(self):
html = '''<!DOCTYPE html>
<html>
<head>
<title>Ordinary HEAD element test</title>
</head>
<script type="text/javascript">
alert("Help!");
</script>
<body>
Hello, world!
</body>
</html>
'''
soup = self.soup(html)
self.assertEqual("text/javascript", soup.find('script')['type'])
def test_comment(self):
# Comments are represented as Comment objects.
markup = "<p>foo<!--foobar-->baz</p>"
self.assertSoupEquals(markup)
soup = self.soup(markup)
comment = soup.find(text="foobar")
self.assertEqual(comment.__class__, Comment)
# The comment is properly integrated into the tree.
foo = soup.find(text="foo")
self.assertEqual(comment, foo.next_element)
baz = soup.find(text="baz")
self.assertEqual(comment, baz.previous_element)
def test_preserved_whitespace_in_pre_and_textarea(self):
"""Whitespace must be preserved in <pre> and <textarea> tags,
even if that would mean not prettifying the markup.
"""
pre_markup = "<pre> </pre>"
textarea_markup = "<textarea> woo\nwoo </textarea>"
self.assertSoupEquals(pre_markup)
self.assertSoupEquals(textarea_markup)
soup = self.soup(pre_markup)
self.assertEqual(soup.pre.prettify(), pre_markup)
soup = self.soup(textarea_markup)
self.assertEqual(soup.textarea.prettify(), textarea_markup)
soup = self.soup("<textarea></textarea>")
self.assertEqual(soup.textarea.prettify(), "<textarea></textarea>")
def test_nested_inline_elements(self):
"""Inline elements can be nested indefinitely."""
b_tag = "<b>Inside a B tag</b>"
self.assertSoupEquals(b_tag)
nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>"
self.assertSoupEquals(nested_b_tag)
double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>"
        self.assertSoupEquals(double_nested_b_tag)
def test_nested_block_level_elements(self):
"""Block elements can be nested."""
soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>')
blockquote = soup.blockquote
self.assertEqual(blockquote.p.b.string, 'Foo')
self.assertEqual(blockquote.b.string, 'Foo')
def test_correctly_nested_tables(self):
"""One table can go inside another one."""
markup = ('<table id="1">'
'<tr>'
"<td>Here's another table:"
'<table id="2">'
'<tr><td>foo</td></tr>'
'</table></td>')
self.assertSoupEquals(
markup,
'<table id="1"><tr><td>Here\'s another table:'
'<table id="2"><tr><td>foo</td></tr></table>'
'</td></tr></table>')
self.assertSoupEquals(
"<table><thead><tr><td>Foo</td></tr></thead>"
"<tbody><tr><td>Bar</td></tr></tbody>"
"<tfoot><tr><td>Baz</td></tr></tfoot></table>")
def test_deeply_nested_multivalued_attribute(self):
# html5lib can set the attributes of the same tag many times
# as it rearranges the tree. This has caused problems with
# multivalued attributes.
markup = '<table><div><div class="css"></div></div></table>'
soup = self.soup(markup)
self.assertEqual(["css"], soup.div.div['class'])
def test_multivalued_attribute_on_html(self):
        # html5lib uses a different API to set the attributes of the
# <html> tag. This has caused problems with multivalued
# attributes.
markup = '<html class="a b"></html>'
soup = self.soup(markup)
self.assertEqual(["a", "b"], soup.html['class'])
def test_angle_brackets_in_attribute_values_are_escaped(self):
        self.assertSoupEquals('<a b="<a>"></a>', '<a b="&lt;a&gt;"></a>')
def test_entities_in_attributes_converted_to_unicode(self):
expect = u'<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
self.assertSoupEquals('<p id="piñata"></p>', expect)
self.assertSoupEquals('<p id="piñata"></p>', expect)
self.assertSoupEquals('<p id="piñata"></p>', expect)
self.assertSoupEquals('<p id="piñata"></p>', expect)
def test_entities_in_text_converted_to_unicode(self):
expect = u'<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
self.assertSoupEquals("<p>piñata</p>", expect)
self.assertSoupEquals("<p>piñata</p>", expect)
self.assertSoupEquals("<p>piñata</p>", expect)
self.assertSoupEquals("<p>piñata</p>", expect)
def test_quot_entity_converted_to_quotation_mark(self):
self.assertSoupEquals("<p>I said "good day!"</p>",
'<p>I said "good day!"</p>')
def test_out_of_range_entity(self):
expect = u"\N{REPLACEMENT CHARACTER}"
self.assertSoupEquals("�", expect)
self.assertSoupEquals("�", expect)
self.assertSoupEquals("�", expect)
def test_multipart_strings(self):
"Mostly to prevent a recurrence of a bug in the html5lib treebuilder."
soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
self.assertEqual("p", soup.h2.string.next_element.name)
self.assertEqual("p", soup.p.name)
self.assertConnectedness(soup)
def test_empty_element_tags(self):
"""Verify consistent handling of empty-element tags,
no matter how they come in through the markup.
"""
self.assertSoupEquals('<br/><br/><br/>', "<br/><br/><br/>")
self.assertSoupEquals('<br /><br /><br />', "<br/><br/><br/>")
def test_head_tag_between_head_and_body(self):
"Prevent recurrence of a bug in the html5lib treebuilder."
content = """<html><head></head>
<link></link>
<body>foo</body>
</html>
"""
soup = self.soup(content)
self.assertNotEqual(None, soup.html.body)
self.assertConnectedness(soup)
def test_multiple_copies_of_a_tag(self):
"Prevent recurrence of a bug in the html5lib treebuilder."
content = """<!DOCTYPE html>
<html>
<body>
<article id="a" >
<div><a href="1"></div>
<footer>
<a href="2"></a>
</footer>
</article>
</body>
</html>
"""
soup = self.soup(content)
self.assertConnectedness(soup.article)
def test_basic_namespaces(self):
"""Parsers don't need to *understand* namespaces, but at the
very least they should not choke on namespaces or lose
data."""
markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>'
soup = self.soup(markup)
self.assertEqual(markup, soup.encode())
html = soup.html
self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns'])
self.assertEqual(
'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml'])
self.assertEqual(
'http://www.w3.org/2000/svg', soup.html['xmlns:svg'])
def test_multivalued_attribute_value_becomes_list(self):
markup = b'<a class="foo bar">'
soup = self.soup(markup)
self.assertEqual(['foo', 'bar'], soup.a['class'])
#
# Generally speaking, tests below this point are more tests of
# Beautiful Soup than tests of the tree builders. But parsers are
# weird, so we run these tests separately for every tree builder
# to detect any differences between them.
#
def test_can_parse_unicode_document(self):
# A seemingly innocuous document... but it's in Unicode! And
# it contains characters that can't be represented in the
# encoding found in the declaration! The horror!
markup = u'<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
soup = self.soup(markup)
self.assertEqual(u'Sacr\xe9 bleu!', soup.body.string)
def test_soupstrainer(self):
"""Parsers should be able to work with SoupStrainers."""
strainer = SoupStrainer("b")
soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>",
parse_only=strainer)
self.assertEqual(soup.decode(), "<b>bold</b>")
def test_single_quote_attribute_values_become_double_quotes(self):
self.assertSoupEquals("<foo attr='bar'></foo>",
'<foo attr="bar"></foo>')
def test_attribute_values_with_nested_quotes_are_left_alone(self):
text = """<foo attr='bar "brawls" happen'>a</foo>"""
self.assertSoupEquals(text)
def test_attribute_values_with_double_nested_quotes_get_quoted(self):
text = """<foo attr='bar "brawls" happen'>a</foo>"""
soup = self.soup(text)
soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"'
self.assertSoupEquals(
soup.foo.decode(),
"""<foo attr="Brawls happen at "Bob\'s Bar"">a</foo>""")
def test_ampersand_in_attribute_value_gets_escaped(self):
        self.assertSoupEquals('<this is="really messed up & stuff"></this>',
                              '<this is="really messed up &amp; stuff"></this>')
        self.assertSoupEquals(
            '<a href="http://example.org?a=1&b=2;3">foo</a>',
            '<a href="http://example.org?a=1&amp;b=2;3">foo</a>')
def test_escaped_ampersand_in_attribute_value_is_left_alone(self):
        self.assertSoupEquals('<a href="http://example.org?a=1&amp;b=2;3"></a>')
def test_entities_in_strings_converted_during_parsing(self):
# Both XML and HTML entities are converted to Unicode characters
# during parsing.
text = "<p><<sacré bleu!>></p>"
expected = u"<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>"
self.assertSoupEquals(text, expected)
def test_smart_quotes_converted_on_the_way_in(self):
# Microsoft smart quotes are converted to Unicode characters during
# parsing.
quote = b"<p>\x91Foo\x92</p>"
soup = self.soup(quote)
self.assertEqual(
soup.p.string,
u"\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")
def test_non_breaking_spaces_converted_on_the_way_in(self):
soup = self.soup("<a> </a>")
self.assertEqual(soup.a.string, u"\N{NO-BREAK SPACE}" * 2)
def test_entities_converted_on_the_way_out(self):
text = "<p><<sacré bleu!>></p>"
expected = u"<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>".encode("utf-8")
soup = self.soup(text)
self.assertEqual(soup.p.encode("utf-8"), expected)
def test_real_iso_latin_document(self):
# Smoke test of interrelated functionality, using an
# easy-to-understand document.
# Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
unicode_html = u'<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'
# That's because we're going to encode it into ISO-Latin-1, and use
# that to test.
iso_latin_html = unicode_html.encode("iso-8859-1")
# Parse the ISO-Latin-1 HTML.
soup = self.soup(iso_latin_html)
# Encode it to UTF-8.
result = soup.encode("utf-8")
# What do we expect the result to look like? Well, it would
# look like unicode_html, except that the META tag would say
# UTF-8 instead of ISO-Latin-1.
expected = unicode_html.replace("ISO-Latin-1", "utf-8")
# And, of course, it would be in UTF-8, not Unicode.
expected = expected.encode("utf-8")
# Ta-da!
self.assertEqual(result, expected)
def test_real_shift_jis_document(self):
# Smoke test to make sure the parser can handle a document in
# Shift-JIS encoding, without choking.
shift_jis_html = (
b'<html><head></head><body><pre>'
b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
b'</pre></body></html>')
unicode_html = shift_jis_html.decode("shift-jis")
soup = self.soup(unicode_html)
# Make sure the parse tree is correctly encoded to various
# encodings.
self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8"))
self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp"))
def test_real_hebrew_document(self):
        # A real-world test to make sure we can convert ISO-8859-8 (a
        # Hebrew encoding) to UTF-8.
hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>'
soup = self.soup(
hebrew_document, from_encoding="iso8859-8")
        # Some tree builders call it iso8859-8, others call it iso-8859-8.
# That's not a difference we really care about.
assert soup.original_encoding in ('iso8859-8', 'iso-8859-8')
self.assertEqual(
soup.encode('utf-8'),
hebrew_document.decode("iso8859-8").encode("utf-8"))
def test_meta_tag_reflects_current_encoding(self):
# Here's the <meta> tag saying that a document is
# encoded in Shift-JIS.
meta_tag = ('<meta content="text/html; charset=x-sjis" '
'http-equiv="Content-type"/>')
# Here's a document incorporating that meta tag.
shift_jis_html = (
'<html><head>\n%s\n'
'<meta http-equiv="Content-language" content="ja"/>'
'</head><body>Shift-JIS markup goes here.') % meta_tag
soup = self.soup(shift_jis_html)
# Parse the document, and the charset is seemingly unaffected.
parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'})
content = parsed_meta['content']
self.assertEqual('text/html; charset=x-sjis', content)
# But that value is actually a ContentMetaAttributeValue object.
self.assertTrue(isinstance(content, ContentMetaAttributeValue))
# And it will take on a value that reflects its current
# encoding.
self.assertEqual('text/html; charset=utf8', content.encode("utf8"))
# For the rest of the story, see TestSubstitutions in
# test_tree.py.
def test_html5_style_meta_tag_reflects_current_encoding(self):
# Here's the <meta> tag saying that a document is
# encoded in Shift-JIS.
meta_tag = ('<meta id="encoding" charset="x-sjis" />')
# Here's a document incorporating that meta tag.
shift_jis_html = (
'<html><head>\n%s\n'
'<meta http-equiv="Content-language" content="ja"/>'
'</head><body>Shift-JIS markup goes here.') % meta_tag
soup = self.soup(shift_jis_html)
# Parse the document, and the charset is seemingly unaffected.
parsed_meta = soup.find('meta', id="encoding")
charset = parsed_meta['charset']
self.assertEqual('x-sjis', charset)
# But that value is actually a CharsetMetaAttributeValue object.
self.assertTrue(isinstance(charset, CharsetMetaAttributeValue))
# And it will take on a value that reflects its current
# encoding.
self.assertEqual('utf8', charset.encode("utf8"))
def test_tag_with_no_attributes_can_have_attributes_added(self):
data = self.soup("<a>text</a>")
data.a['foo'] = 'bar'
self.assertEqual('<a foo="bar">text</a>', data.a.decode())
class XMLTreeBuilderSmokeTest(object):
def test_pickle_and_unpickle_identity(self):
# Pickling a tree, then unpickling it, yields a tree identical
# to the original.
tree = self.soup("<a><b>foo</a>")
dumped = pickle.dumps(tree, 2)
loaded = pickle.loads(dumped)
self.assertEqual(loaded.__class__, BeautifulSoup)
self.assertEqual(loaded.decode(), tree.decode())
def test_docstring_generated(self):
soup = self.soup("<root/>")
self.assertEqual(
soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>')
def test_xml_declaration(self):
markup = b"""<?xml version="1.0" encoding="utf8"?>\n<foo/>"""
soup = self.soup(markup)
self.assertEqual(markup, soup.encode("utf8"))
def test_processing_instruction(self):
markup = b"""<?xml version="1.0" encoding="utf8"?>\n<?PITarget PIContent?>"""
soup = self.soup(markup)
self.assertEqual(markup, soup.encode("utf8"))
def test_real_xhtml_document(self):
"""A real XHTML document should come out *exactly* the same as it went in."""
markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
soup = self.soup(markup)
self.assertEqual(
soup.encode("utf-8"), markup)
def test_formatter_processes_script_tag_for_xml_documents(self):
doc = """
<script type="text/javascript">
</script>
"""
soup = BeautifulSoup(doc, "lxml-xml")
# lxml would have stripped this while parsing, but we can add
# it later.
soup.script.string = 'console.log("< < hey > > ");'
encoded = soup.encode()
self.assertTrue(b"< < hey > >" in encoded)
def test_can_parse_unicode_document(self):
markup = u'<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
soup = self.soup(markup)
self.assertEqual(u'Sacr\xe9 bleu!', soup.root.string)
def test_popping_namespaced_tag(self):
markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
soup = self.soup(markup)
self.assertEqual(
unicode(soup.rss), markup)
def test_docstring_includes_correct_encoding(self):
soup = self.soup("<root/>")
self.assertEqual(
soup.encode("latin1"),
b'<?xml version="1.0" encoding="latin1"?>\n<root/>')
def test_large_xml_document(self):
"""A large XML document should come out the same as it went in."""
markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>'
+ b'0' * (2**12)
+ b'</root>')
soup = self.soup(markup)
self.assertEqual(soup.encode("utf-8"), markup)
def test_tags_are_empty_element_if_and_only_if_they_are_empty(self):
self.assertSoupEquals("<p>", "<p/>")
self.assertSoupEquals("<p>foo</p>")
def test_namespaces_are_preserved(self):
markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>'
soup = self.soup(markup)
root = soup.root
self.assertEqual("http://example.com/", root['xmlns:a'])
self.assertEqual("http://example.net/", root['xmlns:b'])
def test_closing_namespaced_tag(self):
markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
soup = self.soup(markup)
self.assertEqual(unicode(soup.p), markup)
def test_namespaced_attributes(self):
markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
soup = self.soup(markup)
self.assertEqual(unicode(soup.foo), markup)
def test_namespaced_attributes_xml_namespace(self):
markup = '<foo xml:lang="fr">bar</foo>'
soup = self.soup(markup)
self.assertEqual(unicode(soup.foo), markup)
def test_find_by_prefixed_name(self):
doc = """<?xml version="1.0" encoding="utf-8"?>
<Document xmlns="http://example.com/ns0"
xmlns:ns1="http://example.com/ns1"
          xmlns:ns2="http://example.com/ns2">
<ns1:tag>foo</ns1:tag>
<ns1:tag>bar</ns1:tag>
<ns2:tag key="value">baz</ns2:tag>
</Document>
"""
soup = self.soup(doc)
# There are three <tag> tags.
self.assertEqual(3, len(soup.find_all('tag')))
# But two of them are ns1:tag and one of them is ns2:tag.
self.assertEqual(2, len(soup.find_all('ns1:tag')))
self.assertEqual(1, len(soup.find_all('ns2:tag')))
self.assertEqual(1, len(soup.find_all('ns2:tag', key='value')))
self.assertEqual(3, len(soup.find_all(['ns1:tag', 'ns2:tag'])))
def test_copy_tag_preserves_namespace(self):
xml = """<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<w:document xmlns:w="http://example.com/ns0"/>"""
soup = self.soup(xml)
tag = soup.document
duplicate = copy.copy(tag)
# The two tags have the same namespace prefix.
self.assertEqual(tag.prefix, duplicate.prefix)
class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
"""Smoke test for a tree builder that supports HTML5."""
def test_real_xhtml_document(self):
# Since XHTML is not HTML5, HTML5 parsers are not tested to handle
# XHTML documents in any particular way.
pass
def test_html_tags_have_namespace(self):
markup = "<a>"
soup = self.soup(markup)
self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace)
def test_svg_tags_have_namespace(self):
markup = '<svg><circle/></svg>'
soup = self.soup(markup)
namespace = "http://www.w3.org/2000/svg"
self.assertEqual(namespace, soup.svg.namespace)
self.assertEqual(namespace, soup.circle.namespace)
def test_mathml_tags_have_namespace(self):
markup = '<math><msqrt>5</msqrt></math>'
soup = self.soup(markup)
namespace = 'http://www.w3.org/1998/Math/MathML'
self.assertEqual(namespace, soup.math.namespace)
self.assertEqual(namespace, soup.msqrt.namespace)
def test_xml_declaration_becomes_comment(self):
markup = '<?xml version="1.0" encoding="utf-8"?><html></html>'
soup = self.soup(markup)
self.assertTrue(isinstance(soup.contents[0], Comment))
self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?')
self.assertEqual("html", soup.contents[0].next_element.name)
def skipIf(condition, reason):
def nothing(test, *args, **kwargs):
return None
def decorator(test_item):
if condition:
return nothing
else:
return test_item
return decorator
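# --- Editor's illustrative sketch, not part of this module ---
# The smoke-test mixins above are meant to be combined with SoupTest in a
# parser-specific test case; the class below is only an example of that
# pattern, reusing the HTMLParserTreeBuilder imported at the top of the file.
#
#   class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
#       @property
#       def default_builder(self):
#           return HTMLParserTreeBuilder()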
|
gpl-3.0
|
marcok/odoo_modules
|
hr_employee_time_clock/models/hr_contract.py
|
1
|
8125
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2016 - now Bytebrand Outsourcing AG (<http://www.bytebrand.net>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import api, fields, models, _
from dateutil import rrule, parser
import logging
_logger = logging.getLogger(__name__)
class HrContract(models.Model):
"""
    Extension of hr.contract so the HR timesheet plugin can work with duty hours.
"""
_inherit = 'hr.contract'
rate_per_hour = fields.Boolean(string="Use hour rate")
@api.multi
def write(self, values):
for contract in self:
old_date_start = contract.date_start
old_date_end = contract.date_end
old_state = contract.state
analytic_pool = self.env['employee.attendance.analytic']
res = super(HrContract, self).write(values)
if values.get('state') in ('open', 'pending', 'close') \
and old_state in ('draft', 'cancel'):
contract.attach_attendance()
return res
elif values.get('state') == 'cancel':
lines = analytic_pool.search(
[('contract_id', '=', contract.id)])
employee = contract.employee_id
if lines:
contract.remove_from_attendance(lines, employee)
return res
if values.get('resource_calendar_id') \
or 'rate_per_hour' in values.keys():
lines = analytic_pool.search(
[('contract_id', '=', contract.id)])
if lines:
for line in lines:
date_from = str(line.name) + ' 00:00:00'
dates = list(rrule.rrule(
rrule.DAILY,
dtstart=parser.parse(date_from),
until=parser.parse(date_from)))
for date_line in dates:
analytic_pool.recalculate_line(
line_date=str(date_line),
employee_id=contract.employee_id)
if values.get('date_end'):
if old_date_end:
dates = calculate_days(old_date_end, values.get('date_end'))
for date_line in dates:
analytic_pool.recalculate_line(
line_date=str(date_line),
employee_id=contract.employee_id)
else:
lines = analytic_pool.search(
[('contract_id', '=', contract.id),
('attendance_date', '>', values.get('date_end'))])
if lines:
dates = list(rrule.rrule(
rrule.DAILY,
dtstart=parser.parse(values.get('date_end')),
until=parser.parse(lines[-1].name)))
for date_line in dates:
analytic_pool.recalculate_line(
line_date=str(date_line),
employee_id=contract.employee_id)
elif 'date_end' in values.keys():
line = analytic_pool.search(
[('contract_id', '=', contract.id),
('attendance_date', '=', old_date_end)])
lines = analytic_pool.search(
[('sheet_id', '=', line.sheet_id.id),
('attendance_date', '>', old_date_end)])
if lines:
dates = list(rrule.rrule(
rrule.DAILY,
dtstart=parser.parse(old_date_end),
until=parser.parse(lines[-1].name)))
for date_line in dates:
analytic_pool.recalculate_line(
line_date=str(date_line),
employee_id=contract.employee_id)
if values.get('date_start'):
dates = calculate_days(old_date_start, values.get('date_start'))
for date_line in dates:
analytic_pool.recalculate_line(
line_date=str(date_line),
employee_id=contract.employee_id)
return res
@api.multi
def attach_attendance(self):
date_start = self.date_start
date_end = self.date_end
analytic_pool = self.env['employee.attendance.analytic']
sheets = self.env['hr_timesheet_sheet.sheet'].search(
[('employee_id', '=', self.employee_id.id)])
if sheets:
if not date_end:
lines = analytic_pool.search(
[('contract_id', '=', False),
('sheet_id', 'in', sheets.ids), ])
for line in lines:
date_1 = fields.Datetime.from_string(date_start)
date_2 = fields.Datetime.from_string(line.name)
if date_1 <= date_2:
analytic_pool.recalculate_line(
line_date=line.name,
employee_id=self.employee_id)
else:
date_1 = fields.Datetime.from_string(date_start)
date_2 = fields.Datetime.from_string(date_end)
lines = analytic_pool.search(
[('contract_id', '=', False),
('sheet_id', 'in', sheets.ids),
('attendance_date', '>=', date_1),
('attendance_date', '<=', date_2)])
for line in lines:
analytic_pool.recalculate_line(
line_date=line.name,
employee_id=self.employee_id)
@api.multi
def remove_from_attendance(self, lines, employee):
analytic_pool = self.env['employee.attendance.analytic']
for line in lines:
date_from = str(line.name) + ' 00:00:00'
dates = list(rrule.rrule(
rrule.DAILY,
dtstart=parser.parse(date_from),
until=parser.parse(date_from)))
for date_line in dates:
analytic_pool.recalculate_line(
line_date=str(date_line), employee_id=employee)
@api.multi
def unlink(self):
analytic_pool = self.env['employee.attendance.analytic']
lines = analytic_pool.search(
[('contract_id', '=', self.id)])
employee = self.employee_id
res = super(HrContract, self).unlink()
if lines:
self.remove_from_attendance(lines, employee)
return res
def calculate_days(date_start, date_end):
old_date_1 = fields.Datetime.from_string(date_start)
old_date_2 = fields.Datetime.from_string(date_end)
if old_date_1 > old_date_2:
dates = list(rrule.rrule(
rrule.DAILY,
dtstart=parser.parse(str(date_end)),
until=parser.parse(str(date_start))))
else:
dates = list(rrule.rrule(
rrule.DAILY,
dtstart=parser.parse(str(date_start)),
until=parser.parse(str(date_end))))
return dates
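# Illustrative sketch (the dates are made up, not from the module): the helper
# above expands the span between two contract dates into one datetime per day,
# regardless of argument order, e.g.
#
#     calculate_days('2016-01-03', '2016-01-01')
#     # -> [datetime(2016, 1, 1, 0, 0), datetime(2016, 1, 2, 0, 0),
#     #     datetime(2016, 1, 3, 0, 0)]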
|
agpl-3.0
|
savoirfairelinux/django
|
tests/basic/tests.py
|
5
|
29056
|
import threading
from datetime import datetime, timedelta
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from django.db import DEFAULT_DB_ALIAS, DatabaseError, connections
from django.db.models.manager import BaseManager
from django.db.models.query import EmptyQuerySet, QuerySet
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, skipIfDBFeature,
skipUnlessDBFeature,
)
from django.utils.translation import gettext_lazy
from .models import Article, ArticleSelectOnSave, SelfRef
class ModelInstanceCreationTests(TestCase):
def test_object_is_not_written_to_database_until_save_was_called(self):
a = Article(
id=None,
headline='Parrot programs in Python',
pub_date=datetime(2005, 7, 28),
)
self.assertIsNone(a.id)
self.assertEqual(Article.objects.all().count(), 0)
# Save it into the database. You have to call save() explicitly.
a.save()
self.assertIsNotNone(a.id)
self.assertEqual(Article.objects.all().count(), 1)
def test_can_initialize_model_instance_using_positional_arguments(self):
"""
You can initialize a model instance using positional arguments,
which should match the field order as defined in the model.
"""
a = Article(None, 'Second article', datetime(2005, 7, 29))
a.save()
self.assertEqual(a.headline, 'Second article')
self.assertEqual(a.pub_date, datetime(2005, 7, 29, 0, 0))
def test_can_create_instance_using_kwargs(self):
a = Article(
id=None,
headline='Third article',
pub_date=datetime(2005, 7, 30),
)
a.save()
self.assertEqual(a.headline, 'Third article')
self.assertEqual(a.pub_date, datetime(2005, 7, 30, 0, 0))
def test_autofields_generate_different_values_for_each_instance(self):
a1 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0))
a2 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0))
a3 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0))
self.assertNotEqual(a3.id, a1.id)
self.assertNotEqual(a3.id, a2.id)
def test_can_mix_and_match_position_and_kwargs(self):
# You can also mix and match position and keyword arguments, but
# be sure not to duplicate field information.
a = Article(None, 'Fourth article', pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, 'Fourth article')
def test_cannot_create_instance_with_invalid_kwargs(self):
with self.assertRaisesMessage(TypeError, "'foo' is an invalid keyword argument for this function"):
Article(
id=None,
headline='Some headline',
pub_date=datetime(2005, 7, 31),
foo='bar',
)
def test_can_leave_off_value_for_autofield_and_it_gets_value_on_save(self):
"""
You can leave off the value for an AutoField when creating an
object, because it'll get filled in automatically when you save().
"""
a = Article(headline='Article 5', pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, 'Article 5')
self.assertIsNotNone(a.id)
def test_leaving_off_a_field_with_default_set_the_default_will_be_saved(self):
a = Article(pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, 'Default headline')
def test_for_datetimefields_saves_as_much_precision_as_was_given(self):
"""as much precision in *seconds*"""
a1 = Article(
headline='Article 7',
pub_date=datetime(2005, 7, 31, 12, 30),
)
a1.save()
self.assertEqual(Article.objects.get(id__exact=a1.id).pub_date, datetime(2005, 7, 31, 12, 30))
a2 = Article(
headline='Article 8',
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a2.save()
self.assertEqual(Article.objects.get(id__exact=a2.id).pub_date, datetime(2005, 7, 31, 12, 30, 45))
def test_saving_an_object_again_does_not_create_a_new_object(self):
a = Article(headline='original', pub_date=datetime(2014, 5, 16))
a.save()
current_id = a.id
a.save()
self.assertEqual(a.id, current_id)
a.headline = 'Updated headline'
a.save()
self.assertEqual(a.id, current_id)
def test_querysets_checking_for_membership(self):
headlines = [
'Parrot programs in Python', 'Second article', 'Third article']
some_pub_date = datetime(2014, 5, 16, 12, 1)
for headline in headlines:
Article(headline=headline, pub_date=some_pub_date).save()
a = Article(headline='Some headline', pub_date=some_pub_date)
a.save()
# You can use 'in' to test for membership...
self.assertIn(a, Article.objects.all())
# ... but there will often be more efficient ways if that is all you need:
self.assertTrue(Article.objects.filter(id=a.id).exists())
class ModelTest(TestCase):
def test_objects_attribute_is_only_available_on_the_class_itself(self):
with self.assertRaisesMessage(AttributeError, "Manager isn't accessible via Article instances"):
getattr(Article(), "objects",)
self.assertFalse(hasattr(Article(), 'objects'))
self.assertTrue(hasattr(Article, 'objects'))
def test_queryset_delete_removes_all_items_in_that_queryset(self):
headlines = [
'An article', 'Article One', 'Amazing article', 'Boring article']
some_pub_date = datetime(2014, 5, 16, 12, 1)
for headline in headlines:
Article(headline=headline, pub_date=some_pub_date).save()
self.assertQuerysetEqual(
Article.objects.all().order_by('headline'),
["<Article: Amazing article>",
"<Article: An article>",
"<Article: Article One>",
"<Article: Boring article>"]
)
Article.objects.filter(headline__startswith='A').delete()
self.assertQuerysetEqual(Article.objects.all().order_by('headline'), ["<Article: Boring article>"])
def test_not_equal_and_equal_operators_behave_as_expected_on_instances(self):
some_pub_date = datetime(2014, 5, 16, 12, 1)
a1 = Article.objects.create(headline='First', pub_date=some_pub_date)
a2 = Article.objects.create(headline='Second', pub_date=some_pub_date)
self.assertNotEqual(a1, a2)
self.assertEqual(a1, Article.objects.get(id__exact=a1.id))
self.assertNotEqual(Article.objects.get(id__exact=a1.id), Article.objects.get(id__exact=a2.id))
@skipUnlessDBFeature('supports_microsecond_precision')
def test_microsecond_precision(self):
# In PostgreSQL, microsecond-level precision is available.
a9 = Article(
headline='Article 9',
pub_date=datetime(2005, 7, 31, 12, 30, 45, 180),
)
a9.save()
self.assertEqual(Article.objects.get(pk=a9.pk).pub_date, datetime(2005, 7, 31, 12, 30, 45, 180))
@skipIfDBFeature('supports_microsecond_precision')
def test_microsecond_precision_not_supported(self):
# In MySQL, microsecond-level precision isn't always available. You'll
# lose microsecond-level precision once the data is saved.
a9 = Article(
headline='Article 9',
pub_date=datetime(2005, 7, 31, 12, 30, 45, 180),
)
a9.save()
self.assertEqual(
Article.objects.get(id__exact=a9.id).pub_date,
datetime(2005, 7, 31, 12, 30, 45),
)
@skipIfDBFeature('supports_microsecond_precision')
def test_microsecond_precision_not_supported_edge_case(self):
# In MySQL, microsecond-level precision isn't always available. You'll
# lose microsecond-level precision once the data is saved.
a = Article.objects.create(
headline='Article',
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
self.assertEqual(
Article.objects.get(pk=a.pk).pub_date,
datetime(2008, 12, 31, 23, 59, 59),
)
def test_manually_specify_primary_key(self):
# You can manually specify the primary key when creating a new object.
a101 = Article(
id=101,
headline='Article 101',
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a101.save()
a101 = Article.objects.get(pk=101)
self.assertEqual(a101.headline, 'Article 101')
def test_create_method(self):
# You can create saved objects in a single step
a10 = Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
self.assertEqual(Article.objects.get(headline="Article 10"), a10)
def test_year_lookup_edge_case(self):
# Edge-case test: A year lookup should retrieve all objects in
# the given year, including Jan. 1 and Dec. 31.
Article.objects.create(
headline='Article 11',
pub_date=datetime(2008, 1, 1),
)
Article.objects.create(
headline='Article 12',
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__year=2008),
["<Article: Article 11>", "<Article: Article 12>"]
)
def test_unicode_data(self):
# Unicode data works, too.
a = Article(
headline='\u6797\u539f \u3081\u3050\u307f',
pub_date=datetime(2005, 7, 28),
)
a.save()
self.assertEqual(Article.objects.get(pk=a.id).headline, '\u6797\u539f \u3081\u3050\u307f')
def test_hash_function(self):
# Model instances have a hash function, so they can be used in sets
# or as dictionary keys. Two models compare as equal if their primary
# keys are equal.
a10 = Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a11 = Article.objects.create(
headline='Article 11',
pub_date=datetime(2008, 1, 1),
)
a12 = Article.objects.create(
headline='Article 12',
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
s = {a10, a11, a12}
self.assertIn(Article.objects.get(headline='Article 11'), s)
def test_extra_method_select_argument_with_dashes_and_values(self):
# The 'select' argument to extra() supports names with dashes in
# them, as long as you use values().
Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
Article.objects.create(
headline='Article 11',
pub_date=datetime(2008, 1, 1),
)
Article.objects.create(
headline='Article 12',
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
dicts = Article.objects.filter(
pub_date__year=2008).extra(
select={'dashed-value': '1'}).values('headline', 'dashed-value')
self.assertEqual(
[sorted(d.items()) for d in dicts],
[[('dashed-value', 1), ('headline', 'Article 11')], [('dashed-value', 1), ('headline', 'Article 12')]]
)
def test_extra_method_select_argument_with_dashes(self):
# If you use 'select' with extra() and names containing dashes on a
# query that's *not* a values() query, those extra 'select' values
# will silently be ignored.
Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
Article.objects.create(
headline='Article 11',
pub_date=datetime(2008, 1, 1),
)
Article.objects.create(
headline='Article 12',
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
articles = Article.objects.filter(
pub_date__year=2008).extra(select={'dashed-value': '1', 'undashedvalue': '2'})
self.assertEqual(articles[0].undashedvalue, 2)
def test_create_relation_with_gettext_lazy(self):
"""
gettext_lazy objects work when saving model instances
through various methods. Refs #10498.
"""
notlazy = 'test'
lazy = gettext_lazy(notlazy)
Article.objects.create(headline=lazy, pub_date=datetime.now())
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
# test that assign + save works with Promise objects
article.headline = lazy
article.save()
self.assertEqual(article.headline, notlazy)
# test .update()
Article.objects.update(headline=lazy)
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
# still test bulk_create()
Article.objects.all().delete()
Article.objects.bulk_create([Article(headline=lazy, pub_date=datetime.now())])
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
def test_emptyqs(self):
# Can't be instantiated
with self.assertRaises(TypeError):
EmptyQuerySet()
self.assertIsInstance(Article.objects.none(), EmptyQuerySet)
self.assertNotIsInstance('', EmptyQuerySet)
def test_emptyqs_values(self):
# test for #15959
Article.objects.create(headline='foo', pub_date=datetime.now())
with self.assertNumQueries(0):
qs = Article.objects.none().values_list('pk')
self.assertIsInstance(qs, EmptyQuerySet)
self.assertEqual(len(qs), 0)
def test_emptyqs_customqs(self):
# A hacky test for custom QuerySet subclass - refs #17271
Article.objects.create(headline='foo', pub_date=datetime.now())
class CustomQuerySet(QuerySet):
def do_something(self):
return 'did something'
qs = Article.objects.all()
qs.__class__ = CustomQuerySet
qs = qs.none()
with self.assertNumQueries(0):
self.assertEqual(len(qs), 0)
self.assertIsInstance(qs, EmptyQuerySet)
self.assertEqual(qs.do_something(), 'did something')
def test_emptyqs_values_order(self):
# Tests for ticket #17712
Article.objects.create(headline='foo', pub_date=datetime.now())
with self.assertNumQueries(0):
self.assertEqual(len(Article.objects.none().values_list('id').order_by('id')), 0)
with self.assertNumQueries(0):
self.assertEqual(len(Article.objects.none().filter(
id__in=Article.objects.values_list('id', flat=True))), 0)
@skipUnlessDBFeature('can_distinct_on_fields')
def test_emptyqs_distinct(self):
# Tests for #19426
Article.objects.create(headline='foo', pub_date=datetime.now())
with self.assertNumQueries(0):
self.assertEqual(len(Article.objects.none().distinct('headline', 'pub_date')), 0)
def test_ticket_20278(self):
sr = SelfRef.objects.create()
with self.assertRaises(ObjectDoesNotExist):
SelfRef.objects.get(selfref=sr)
def test_eq(self):
self.assertEqual(Article(id=1), Article(id=1))
self.assertNotEqual(Article(id=1), object())
self.assertNotEqual(object(), Article(id=1))
a = Article()
self.assertEqual(a, a)
self.assertNotEqual(Article(), a)
def test_hash(self):
# Value based on PK
self.assertEqual(hash(Article(id=1)), hash(1))
with self.assertRaises(TypeError):
# No PK value -> unhashable (because save() would then change
# hash)
hash(Article())
def test_delete_and_access_field(self):
# Accessing a field after it's deleted from a model reloads its value.
pub_date = datetime.now()
article = Article.objects.create(headline='foo', pub_date=pub_date)
new_pub_date = article.pub_date + timedelta(days=10)
article.headline = 'bar'
article.pub_date = new_pub_date
del article.headline
with self.assertNumQueries(1):
self.assertEqual(article.headline, 'foo')
# Fields that weren't deleted aren't reloaded.
self.assertEqual(article.pub_date, new_pub_date)
class ModelLookupTest(TestCase):
def setUp(self):
# Create an Article.
self.a = Article(
id=None,
headline='Swallow programs in Python',
pub_date=datetime(2005, 7, 28),
)
# Save it into the database. You have to call save() explicitly.
self.a.save()
def test_all_lookup(self):
# Change values by changing the attributes, then calling save().
self.a.headline = 'Parrot programs in Python'
self.a.save()
# Article.objects.all() returns all the articles in the database.
self.assertQuerysetEqual(Article.objects.all(), ['<Article: Parrot programs in Python>'])
def test_rich_lookup(self):
# Django provides a rich database lookup API.
self.assertEqual(Article.objects.get(id__exact=self.a.id), self.a)
self.assertEqual(Article.objects.get(headline__startswith='Swallow'), self.a)
self.assertEqual(Article.objects.get(pub_date__year=2005), self.a)
self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7), self.a)
self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7, pub_date__day=28), self.a)
self.assertEqual(Article.objects.get(pub_date__week_day=5), self.a)
def test_equal_lookup(self):
# The "__exact" lookup type can be omitted, as a shortcut.
self.assertEqual(Article.objects.get(id=self.a.id), self.a)
self.assertEqual(Article.objects.get(headline='Swallow programs in Python'), self.a)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__year=2005),
['<Article: Swallow programs in Python>'],
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__year=2004),
[],
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__year=2005, pub_date__month=7),
['<Article: Swallow programs in Python>'],
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__week_day=5),
['<Article: Swallow programs in Python>'],
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__week_day=6),
[],
)
def test_does_not_exist(self):
# Django raises an Article.DoesNotExist exception for get() if the
# parameters don't match any object.
with self.assertRaisesMessage(ObjectDoesNotExist, "Article matching query does not exist."):
Article.objects.get(id__exact=2000,)
        # To avoid dict-ordering related errors, check only one lookup
        # per assert.
with self.assertRaises(ObjectDoesNotExist):
Article.objects.get(pub_date__year=2005, pub_date__month=8)
with self.assertRaisesMessage(ObjectDoesNotExist, "Article matching query does not exist."):
Article.objects.get(pub_date__week_day=6,)
def test_lookup_by_primary_key(self):
# Lookup by a primary key is the most common case, so Django
# provides a shortcut for primary-key exact lookups.
# The following is identical to articles.get(id=a.id).
self.assertEqual(Article.objects.get(pk=self.a.id), self.a)
# pk can be used as a shortcut for the primary key name in any query.
self.assertQuerysetEqual(Article.objects.filter(pk__in=[self.a.id]), ["<Article: Swallow programs in Python>"])
# Model instances of the same type and same ID are considered equal.
a = Article.objects.get(pk=self.a.id)
b = Article.objects.get(pk=self.a.id)
self.assertEqual(a, b)
def test_too_many(self):
# Create a very similar object
a = Article(
id=None,
headline='Swallow bites Python',
pub_date=datetime(2005, 7, 28),
)
a.save()
self.assertEqual(Article.objects.count(), 2)
# Django raises an Article.MultipleObjectsReturned exception if the
# lookup matches more than one object
msg = "get() returned more than one Article -- it returned 2!"
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(headline__startswith='Swallow',)
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(pub_date__year=2005,)
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(pub_date__year=2005, pub_date__month=7)
class ConcurrentSaveTests(TransactionTestCase):
available_apps = ['basic']
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_concurrent_delete_with_save(self):
"""
Test fetching, deleting and finally saving an object - we should get
an insert in this case.
"""
a = Article.objects.create(headline='foo', pub_date=datetime.now())
exceptions = []
def deleter():
try:
# Do not delete a directly - doing so alters its state.
Article.objects.filter(pk=a.pk).delete()
except Exception as e:
exceptions.append(e)
finally:
connections[DEFAULT_DB_ALIAS].close()
self.assertEqual(len(exceptions), 0)
t = threading.Thread(target=deleter)
t.start()
t.join()
a.save()
self.assertEqual(Article.objects.get(pk=a.pk).headline, 'foo')
class ManagerTest(SimpleTestCase):
QUERYSET_PROXY_METHODS = [
'none',
'count',
'dates',
'datetimes',
'distinct',
'extra',
'get',
'get_or_create',
'update_or_create',
'create',
'bulk_create',
'filter',
'aggregate',
'annotate',
'complex_filter',
'exclude',
'in_bulk',
'iterator',
'earliest',
'latest',
'first',
'last',
'order_by',
'select_for_update',
'select_related',
'prefetch_related',
'values',
'values_list',
'update',
'reverse',
'defer',
'only',
'using',
'exists',
'_insert',
'_update',
'raw',
'union',
'intersection',
'difference',
]
def test_manager_methods(self):
"""
This test ensures that the correct set of methods from `QuerySet`
are copied onto `Manager`.
It's particularly useful to prevent accidentally leaking new methods
into `Manager`. New `QuerySet` methods that should also be copied onto
`Manager` will need to be added to `ManagerTest.QUERYSET_PROXY_METHODS`.
"""
self.assertEqual(
sorted(BaseManager._get_queryset_methods(QuerySet)),
sorted(self.QUERYSET_PROXY_METHODS),
)
class SelectOnSaveTests(TestCase):
def test_select_on_save(self):
a1 = Article.objects.create(pub_date=datetime.now())
with self.assertNumQueries(1):
a1.save()
asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now())
with self.assertNumQueries(2):
asos.save()
with self.assertNumQueries(1):
asos.save(force_update=True)
Article.objects.all().delete()
with self.assertRaises(DatabaseError):
with self.assertNumQueries(1):
asos.save(force_update=True)
def test_select_on_save_lying_update(self):
"""
select_on_save works correctly if the database doesn't return correct
information about matched rows from UPDATE.
"""
# Change the manager to not return "row matched" for update().
        # We are going to change Article's _base_manager class
        # dynamically. This is a bit of a hack, but it seems hard to
        # test this properly otherwise. We patch Article's manager (not
        # ArticleSelectOnSave's) because proxy models use their parent
        # model's _base_manager.
orig_class = Article._base_manager._queryset_class
class FakeQuerySet(QuerySet):
# Make sure the _update method below is in fact called.
called = False
def _update(self, *args, **kwargs):
FakeQuerySet.called = True
super()._update(*args, **kwargs)
return 0
try:
Article._base_manager._queryset_class = FakeQuerySet
asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now())
with self.assertNumQueries(3):
asos.save()
self.assertTrue(FakeQuerySet.called)
# This is not wanted behavior, but this is how Django has always
# behaved for databases that do not return correct information
# about matched rows for UPDATE.
with self.assertRaises(DatabaseError):
asos.save(force_update=True)
with self.assertRaises(DatabaseError):
asos.save(update_fields=['pub_date'])
finally:
Article._base_manager._queryset_class = orig_class
class ModelRefreshTests(TestCase):
def _truncate_ms(self, val):
# MySQL < 5.6.4 removes microseconds from the datetimes which can cause
# problems when comparing the original value to that loaded from DB
return val - timedelta(microseconds=val.microsecond)
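    # Hedged example of the helper above (the value is made up): microseconds
    # are simply dropped before comparison, e.g.
    #     _truncate_ms(datetime(2014, 5, 16, 12, 1, 30, 123456))
    #     # -> datetime(2014, 5, 16, 12, 1, 30)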
def test_refresh(self):
a = Article.objects.create(pub_date=self._truncate_ms(datetime.now()))
Article.objects.create(pub_date=self._truncate_ms(datetime.now()))
Article.objects.filter(pk=a.pk).update(headline='new headline')
with self.assertNumQueries(1):
a.refresh_from_db()
self.assertEqual(a.headline, 'new headline')
orig_pub_date = a.pub_date
new_pub_date = a.pub_date + timedelta(10)
Article.objects.update(headline='new headline 2', pub_date=new_pub_date)
with self.assertNumQueries(1):
a.refresh_from_db(fields=['headline'])
self.assertEqual(a.headline, 'new headline 2')
self.assertEqual(a.pub_date, orig_pub_date)
with self.assertNumQueries(1):
a.refresh_from_db()
self.assertEqual(a.pub_date, new_pub_date)
def test_unknown_kwarg(self):
s = SelfRef.objects.create()
with self.assertRaises(TypeError):
s.refresh_from_db(unknown_kwarg=10)
def test_refresh_fk(self):
s1 = SelfRef.objects.create()
s2 = SelfRef.objects.create()
s3 = SelfRef.objects.create(selfref=s1)
s3_copy = SelfRef.objects.get(pk=s3.pk)
s3_copy.selfref.touched = True
s3.selfref = s2
s3.save()
with self.assertNumQueries(1):
s3_copy.refresh_from_db()
with self.assertNumQueries(1):
# The old related instance was thrown away (the selfref_id has
# changed). It needs to be reloaded on access, so one query
# executed.
self.assertFalse(hasattr(s3_copy.selfref, 'touched'))
self.assertEqual(s3_copy.selfref, s2)
def test_refresh_null_fk(self):
s1 = SelfRef.objects.create()
s2 = SelfRef.objects.create(selfref=s1)
s2.selfref = None
s2.refresh_from_db()
self.assertEqual(s2.selfref, s1)
def test_refresh_unsaved(self):
pub_date = self._truncate_ms(datetime.now())
a = Article.objects.create(pub_date=pub_date)
a2 = Article(id=a.pk)
with self.assertNumQueries(1):
a2.refresh_from_db()
self.assertEqual(a2.pub_date, pub_date)
self.assertEqual(a2._state.db, "default")
def test_refresh_fk_on_delete_set_null(self):
a = Article.objects.create(
headline='Parrot programs in Python',
pub_date=datetime(2005, 7, 28),
)
s1 = SelfRef.objects.create(article=a)
a.delete()
s1.refresh_from_db()
self.assertIsNone(s1.article_id)
self.assertIsNone(s1.article)
def test_refresh_no_fields(self):
a = Article.objects.create(pub_date=self._truncate_ms(datetime.now()))
with self.assertNumQueries(0):
a.refresh_from_db(fields=[])
|
bsd-3-clause
|
maartenq/ansible
|
test/runner/lib/executor.py
|
2
|
50538
|
"""Execute Ansible tests."""
from __future__ import absolute_import, print_function
import json
import os
import collections
import datetime
import re
import tempfile
import time
import textwrap
import functools
import pipes
import sys
import hashlib
import lib.pytar
import lib.thread
from lib.core_ci import (
AnsibleCoreCI,
SshKey,
)
from lib.manage_ci import (
ManageWindowsCI,
ManageNetworkCI,
)
from lib.cloud import (
cloud_filter,
cloud_init,
get_cloud_environment,
get_cloud_platforms,
)
from lib.util import (
ApplicationWarning,
ApplicationError,
SubprocessError,
display,
run_command,
intercept_command,
remove_tree,
make_dirs,
is_shippable,
is_binary_file,
find_executable,
raw_command,
get_coverage_path,
get_available_port,
generate_pip_command,
find_python,
)
from lib.docker_util import (
docker_pull,
docker_run,
get_docker_container_id,
get_docker_container_ip,
)
from lib.ansible_util import (
ansible_environment,
)
from lib.target import (
IntegrationTarget,
walk_external_targets,
walk_internal_targets,
walk_posix_integration_targets,
walk_network_integration_targets,
walk_windows_integration_targets,
walk_units_targets,
)
from lib.changes import (
ShippableChanges,
LocalChanges,
)
from lib.git import (
Git,
)
from lib.classification import (
categorize_changes,
)
from lib.config import (
TestConfig,
EnvironmentConfig,
IntegrationConfig,
NetworkIntegrationConfig,
PosixIntegrationConfig,
ShellConfig,
UnitsConfig,
WindowsIntegrationConfig,
)
from lib.metadata import (
ChangeDescription,
)
SUPPORTED_PYTHON_VERSIONS = (
'2.6',
'2.7',
'3.5',
'3.6',
'3.7',
)
HTTPTESTER_HOSTS = (
'ansible.http.tests',
'sni1.ansible.http.tests',
'fail.ansible.http.tests',
)
def check_startup():
"""Checks to perform at startup before running commands."""
check_legacy_modules()
def check_legacy_modules():
"""Detect conflicts with legacy core/extras module directories to avoid problems later."""
for directory in 'core', 'extras':
path = 'lib/ansible/modules/%s' % directory
for root, _, file_names in os.walk(path):
if file_names:
# the directory shouldn't exist, but if it does, it must contain no files
raise ApplicationError('Files prohibited in "%s". '
'These are most likely legacy modules from version 2.2 or earlier.' % root)
def create_shell_command(command):
"""
:type command: list[str]
:rtype: list[str]
"""
optional_vars = (
'TERM',
)
cmd = ['/usr/bin/env']
cmd += ['%s=%s' % (var, os.environ[var]) for var in optional_vars if var in os.environ]
cmd += command
return cmd
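# Hedged example (the TERM value is hypothetical): with TERM exported in the
# calling environment,
#     create_shell_command(['bash', '-i'])
#     # -> ['/usr/bin/env', 'TERM=xterm-256color', 'bash', '-i']
# and without TERM it degenerates to ['/usr/bin/env', 'bash', '-i'].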
def install_command_requirements(args, python_version=None):
"""
:type args: EnvironmentConfig
:type python_version: str | None
"""
generate_egg_info(args)
if not args.requirements:
return
if isinstance(args, ShellConfig):
return
packages = []
if isinstance(args, TestConfig):
if args.coverage:
packages.append('coverage')
if args.junit:
packages.append('junit-xml')
if not python_version:
python_version = args.python_version
pip = generate_pip_command(find_python(python_version))
commands = [generate_pip_install(pip, args.command, packages=packages)]
if isinstance(args, IntegrationConfig):
for cloud_platform in get_cloud_platforms(args):
commands.append(generate_pip_install(pip, '%s.cloud.%s' % (args.command, cloud_platform)))
commands = [cmd for cmd in commands if cmd]
# only look for changes when more than one requirements file is needed
detect_pip_changes = len(commands) > 1
# first pass to install requirements, changes expected unless environment is already set up
changes = run_pip_commands(args, pip, commands, detect_pip_changes)
if not changes:
return # no changes means we can stop early
# second pass to check for conflicts in requirements, changes are not expected here
changes = run_pip_commands(args, pip, commands, detect_pip_changes)
if not changes:
return # no changes means no conflicts
raise ApplicationError('Conflicts detected in requirements. The following commands reported changes during verification:\n%s' %
'\n'.join((' '.join(pipes.quote(c) for c in cmd) for cmd in changes)))
def run_pip_commands(args, pip, commands, detect_pip_changes=False):
"""
:type args: EnvironmentConfig
:type pip: list[str]
:type commands: list[list[str]]
:type detect_pip_changes: bool
:rtype: list[list[str]]
"""
changes = []
after_list = pip_list(args, pip) if detect_pip_changes else None
for cmd in commands:
if not cmd:
continue
before_list = after_list
try:
run_command(args, cmd)
except SubprocessError as ex:
if ex.status != 2:
raise
# If pip is too old it won't understand the arguments we passed in, so we'll need to upgrade it.
# Installing "coverage" on ubuntu 16.04 fails with the error:
# AttributeError: 'Requirement' object has no attribute 'project_name'
# See: https://bugs.launchpad.net/ubuntu/xenial/+source/python-pip/+bug/1626258
# Upgrading pip works around the issue.
run_command(args, pip + ['install', '--upgrade', 'pip'])
run_command(args, cmd)
after_list = pip_list(args, pip) if detect_pip_changes else None
if before_list != after_list:
changes.append(cmd)
return changes
def pip_list(args, pip):
"""
:type args: EnvironmentConfig
:type pip: list[str]
:rtype: str
"""
stdout, _ = run_command(args, pip + ['list'], capture=True)
return stdout
def generate_egg_info(args):
"""
:type args: EnvironmentConfig
"""
if os.path.isdir('lib/ansible.egg-info'):
return
run_command(args, [args.python_executable, 'setup.py', 'egg_info'], capture=args.verbosity < 3)
def generate_pip_install(pip, command, packages=None):
"""
:type pip: list[str]
:type command: str
:type packages: list[str] | None
:rtype: list[str] | None
"""
constraints = 'test/runner/requirements/constraints.txt'
requirements = 'test/runner/requirements/%s.txt' % command
options = []
if os.path.exists(requirements) and os.path.getsize(requirements):
options += ['-r', requirements]
if packages:
options += packages
if not options:
return None
return pip + ['install', '--disable-pip-version-check', '-c', constraints] + options
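# Hedged example of the command assembled above (the short 'pip' invocation is
# illustrative): for command='units' with a non-empty requirements file,
#     generate_pip_install(['pip'], 'units')
#     # -> ['pip', 'install', '--disable-pip-version-check',
#     #     '-c', 'test/runner/requirements/constraints.txt',
#     #     '-r', 'test/runner/requirements/units.txt']
# and it returns None when there is nothing to install.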
def command_shell(args):
"""
:type args: ShellConfig
"""
if args.delegate:
raise Delegate()
install_command_requirements(args)
if args.inject_httptester:
inject_httptester(args)
cmd = create_shell_command(['bash', '-i'])
run_command(args, cmd)
def command_posix_integration(args):
"""
:type args: PosixIntegrationConfig
"""
all_targets = tuple(walk_posix_integration_targets(include_hidden=True))
internal_targets = command_integration_filter(args, all_targets)
command_integration_filtered(args, internal_targets, all_targets)
def command_network_integration(args):
"""
:type args: NetworkIntegrationConfig
"""
default_filename = 'test/integration/inventory.networking'
if args.inventory:
filename = os.path.join('test/integration', args.inventory)
else:
filename = default_filename
if not args.explain and not args.platform and not os.path.exists(filename):
if args.inventory:
filename = os.path.abspath(filename)
raise ApplicationError(
'Inventory not found: %s\n'
'Use --inventory to specify the inventory path.\n'
'Use --platform to provision resources and generate an inventory file.\n'
'See also inventory template: %s.template' % (filename, default_filename)
)
all_targets = tuple(walk_network_integration_targets(include_hidden=True))
internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)
instances = [] # type: list [lib.thread.WrappedThread]
if args.platform:
get_coverage_path(args) # initialize before starting threads
configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
for platform_version in args.platform:
platform, version = platform_version.split('/', 1)
config = configs.get(platform_version)
if not config:
continue
instance = lib.thread.WrappedThread(functools.partial(network_run, args, platform, version, config))
instance.daemon = True
instance.start()
instances.append(instance)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
remotes = [instance.wait_for_result() for instance in instances]
inventory = network_inventory(remotes)
display.info('>>> Inventory: %s\n%s' % (filename, inventory.strip()), verbosity=3)
if not args.explain:
with open(filename, 'w') as inventory_fd:
inventory_fd.write(inventory)
success = False
try:
command_integration_filtered(args, internal_targets, all_targets)
success = True
finally:
if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
for instance in instances:
instance.result.stop()
def network_init(args, internal_targets):
"""
:type args: NetworkIntegrationConfig
:type internal_targets: tuple[IntegrationTarget]
"""
if not args.platform:
return
if args.metadata.instance_config is not None:
return
platform_targets = set(a for t in internal_targets for a in t.aliases if a.startswith('network/'))
instances = [] # type: list [lib.thread.WrappedThread]
# generate an ssh key (if needed) up front once, instead of for each instance
SshKey(args)
for platform_version in args.platform:
platform, version = platform_version.split('/', 1)
platform_target = 'network/%s/' % platform
if platform_target not in platform_targets:
display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
platform_version, platform))
continue
instance = lib.thread.WrappedThread(functools.partial(network_start, args, platform, version))
instance.daemon = True
instance.start()
instances.append(instance)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
def network_start(args, platform, version):
"""
:type args: NetworkIntegrationConfig
:type platform: str
:type version: str
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider)
core_ci.start()
return core_ci.save()
def network_run(args, platform, version, config):
"""
:type args: NetworkIntegrationConfig
:type platform: str
:type version: str
:type config: dict[str, str]
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider, load=False)
core_ci.load(config)
core_ci.wait()
manage = ManageNetworkCI(core_ci)
manage.wait()
return core_ci
def network_inventory(remotes):
"""
:type remotes: list[AnsibleCoreCI]
:rtype: str
"""
groups = dict([(remote.platform, []) for remote in remotes])
net = []
for remote in remotes:
options = dict(
ansible_host=remote.connection.hostname,
ansible_user=remote.connection.username,
ansible_ssh_private_key_file=os.path.abspath(remote.ssh_key.key),
ansible_network_os=remote.platform,
ansible_connection='local'
)
groups[remote.platform].append(
'%s %s' % (
remote.name.replace('.', '-'),
' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
)
)
net.append(remote.platform)
groups['net:children'] = net
template = ''
for group in groups:
hosts = '\n'.join(groups[group])
template += textwrap.dedent("""
[%s]
%s
""") % (group, hosts)
inventory = template
return inventory
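# Hedged illustration of the inventory text produced above (host name and
# address are invented): for a single ios remote the result looks roughly like
#
#     [ios]
#     ios-1-2-3 ansible_connection="local" ansible_host="203.0.113.10" ...
#
#     [net:children]
#     ios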
def command_windows_integration(args):
"""
:type args: WindowsIntegrationConfig
"""
filename = 'test/integration/inventory.winrm'
if not args.explain and not args.windows and not os.path.isfile(filename):
raise ApplicationError('Use the --windows option or provide an inventory file (see %s.template).' % filename)
all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)
instances = [] # type: list [lib.thread.WrappedThread]
if args.windows:
get_coverage_path(args) # initialize before starting threads
configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
for version in args.windows:
config = configs['windows/%s' % version]
instance = lib.thread.WrappedThread(functools.partial(windows_run, args, version, config))
instance.daemon = True
instance.start()
instances.append(instance)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
remotes = [instance.wait_for_result() for instance in instances]
inventory = windows_inventory(remotes)
display.info('>>> Inventory: %s\n%s' % (filename, inventory.strip()), verbosity=3)
if not args.explain:
with open(filename, 'w') as inventory_fd:
inventory_fd.write(inventory)
success = False
try:
command_integration_filtered(args, internal_targets, all_targets)
success = True
finally:
if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
for instance in instances:
instance.result.stop()
def windows_init(args, internal_targets): # pylint: disable=locally-disabled, unused-argument
"""
:type args: WindowsIntegrationConfig
:type internal_targets: tuple[IntegrationTarget]
"""
if not args.windows:
return
if args.metadata.instance_config is not None:
return
instances = [] # type: list [lib.thread.WrappedThread]
for version in args.windows:
instance = lib.thread.WrappedThread(functools.partial(windows_start, args, version))
instance.daemon = True
instance.start()
instances.append(instance)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
def windows_start(args, version):
"""
:type args: WindowsIntegrationConfig
:type version: str
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider)
core_ci.start()
return core_ci.save()
def windows_run(args, version, config):
"""
:type args: WindowsIntegrationConfig
:type version: str
:type config: dict[str, str]
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider, load=False)
core_ci.load(config)
core_ci.wait()
manage = ManageWindowsCI(core_ci)
manage.wait()
return core_ci
def windows_inventory(remotes):
"""
:type remotes: list[AnsibleCoreCI]
:rtype: str
"""
hosts = []
for remote in remotes:
options = dict(
ansible_host=remote.connection.hostname,
ansible_user=remote.connection.username,
ansible_password=remote.connection.password,
ansible_port=remote.connection.port,
)
hosts.append(
'%s %s' % (
remote.name.replace('/', '_'),
' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
)
)
template = """
[windows]
%s
[windows:vars]
ansible_connection=winrm
ansible_winrm_server_cert_validation=ignore
# support winrm connection tests (temporary solution, does not support testing enable/disable of pipelining)
[winrm:children]
windows
# support winrm binary module tests (temporary solution)
[testhost_binary_modules:children]
windows
"""
template = textwrap.dedent(template)
inventory = template % ('\n'.join(hosts))
return inventory
def command_integration_filter(args, targets, init_callback=None):
"""
:type args: IntegrationConfig
:type targets: collections.Iterable[IntegrationTarget]
:type init_callback: (IntegrationConfig, tuple[IntegrationTarget]) -> None
:rtype: tuple[IntegrationTarget]
"""
targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
changes = get_changes_filter(args)
require = (args.require or []) + changes
exclude = (args.exclude or [])
internal_targets = walk_internal_targets(targets, args.include, exclude, require)
environment_exclude = get_integration_filter(args, internal_targets)
environment_exclude += cloud_filter(args, internal_targets)
if environment_exclude:
exclude += environment_exclude
internal_targets = walk_internal_targets(targets, args.include, exclude, require)
if not internal_targets:
raise AllTargetsSkipped()
if args.start_at and not any(t.name == args.start_at for t in internal_targets):
raise ApplicationError('Start at target matches nothing: %s' % args.start_at)
if init_callback:
init_callback(args, internal_targets)
cloud_init(args, internal_targets)
if args.delegate:
raise Delegate(require=changes, exclude=exclude, integration_targets=internal_targets)
install_command_requirements(args)
return internal_targets
def command_integration_filtered(args, targets, all_targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:type all_targets: tuple[IntegrationTarget]
"""
found = False
passed = []
failed = []
targets_iter = iter(targets)
all_targets_dict = dict((target.name, target) for target in all_targets)
setup_errors = []
setup_targets_executed = set()
for target in all_targets:
for setup_target in target.setup_once + target.setup_always:
if setup_target not in all_targets_dict:
setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))
if setup_errors:
raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))
test_dir = os.path.expanduser('~/ansible_testing')
if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
max_tries = 20
display.info('SSH service required for tests. Checking to make sure we can connect.')
for i in range(1, max_tries + 1):
try:
run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
display.info('SSH service responded.')
break
except SubprocessError:
if i == max_tries:
raise
seconds = 3
display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
time.sleep(seconds)
if args.inject_httptester:
inject_httptester(args)
start_at_task = args.start_at_task
results = {}
for target in targets_iter:
if args.start_at and not found:
found = target.name == args.start_at
if not found:
continue
if args.list_targets:
print(target.name)
continue
tries = 2 if args.retry_on_error else 1
verbosity = args.verbosity
cloud_environment = get_cloud_environment(args, target)
original_environment = EnvironmentDescription(args)
display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)
try:
while tries:
tries -= 1
try:
if cloud_environment:
cloud_environment.setup_once()
run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, False)
start_time = time.time()
run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, True)
if not args.explain:
# create a fresh test directory for each test target
remove_tree(test_dir)
make_dirs(test_dir)
if target.script_path:
command_integration_script(args, target)
else:
command_integration_role(args, target, start_at_task)
start_at_task = None
end_time = time.time()
results[target.name] = dict(
name=target.name,
type=target.type,
aliases=target.aliases,
modules=target.modules,
run_time_seconds=int(end_time - start_time),
setup_once=target.setup_once,
setup_always=target.setup_always,
coverage=args.coverage,
coverage_label=args.coverage_label,
python_version=args.python_version,
)
break
except SubprocessError:
if cloud_environment:
cloud_environment.on_failure(target, tries)
if not original_environment.validate(target.name, throw=False):
raise
if not tries:
raise
display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
display.verbosity = args.verbosity = 6
start_time = time.time()
original_environment.validate(target.name, throw=True)
end_time = time.time()
results[target.name]['validation_seconds'] = int(end_time - start_time)
passed.append(target)
except Exception as ex:
failed.append(target)
if args.continue_on_error:
display.error(ex)
continue
display.notice('To resume at this test target, use the option: --start-at %s' % target.name)
next_target = next(targets_iter, None)
if next_target:
display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)
raise
finally:
display.verbosity = args.verbosity = verbosity
if not args.explain:
results_path = 'test/results/data/%s-%s.json' % (args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
data = dict(
targets=results,
)
with open(results_path, 'w') as results_fd:
results_fd.write(json.dumps(data, sort_keys=True, indent=4))
if failed:
raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
def start_httptester(args):
"""
:type args: EnvironmentConfig
:rtype: str, list[str]
"""
# map ports from remote -> localhost -> container
# passing through localhost is only used when ansible-test is not already running inside a docker container
ports = [
dict(
remote=8080,
container=80,
),
dict(
remote=8443,
container=443,
),
]
container_id = get_docker_container_id()
if container_id:
display.info('Running in docker container: %s' % container_id, verbosity=1)
else:
for item in ports:
item['localhost'] = get_available_port()
docker_pull(args, args.httptester)
httptester_id = run_httptester(args, dict((port['localhost'], port['container']) for port in ports if 'localhost' in port))
if container_id:
container_host = get_docker_container_ip(args, httptester_id)
display.info('Found httptester container address: %s' % container_host, verbosity=1)
else:
container_host = 'localhost'
ssh_options = []
for port in ports:
ssh_options += ['-R', '%d:%s:%d' % (port['remote'], container_host, port.get('localhost', port['container']))]
return httptester_id, ssh_options
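# Hedged note (ports are the ones hard-coded above): the ssh_options returned
# here are remote port forwards of the form
#     ['-R', '8080:<container_host>:<port>', '-R', '8443:<container_host>:<port>']
# where <container_host> is the httptester container address when ansible-test
# itself runs inside docker, and 'localhost' plus a locally reserved port
# otherwise.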
def run_httptester(args, ports=None):
"""
:type args: EnvironmentConfig
:type ports: dict[int, int] | None
:rtype: str
"""
options = [
'--detach',
]
if ports:
for localhost_port, container_port in ports.items():
options += ['-p', '%d:%d' % (localhost_port, container_port)]
httptester_id, _ = docker_run(args, args.httptester, options=options)
if args.explain:
httptester_id = 'httptester_id'
else:
httptester_id = httptester_id.strip()
return httptester_id
def inject_httptester(args):
"""
:type args: CommonConfig
"""
comment = ' # ansible-test httptester\n'
append_lines = ['127.0.0.1 %s%s' % (host, comment) for host in HTTPTESTER_HOSTS]
with open('/etc/hosts', 'r+') as hosts_fd:
original_lines = hosts_fd.readlines()
if not any(line.endswith(comment) for line in original_lines):
hosts_fd.writelines(append_lines)
# determine which forwarding mechanism to use
pfctl = find_executable('pfctl', required=False)
iptables = find_executable('iptables', required=False)
if pfctl:
kldload = find_executable('kldload', required=False)
if kldload:
try:
run_command(args, ['kldload', 'pf'], capture=True)
except SubprocessError:
pass # already loaded
rules = '''
rdr pass inet proto tcp from any to any port 80 -> 127.0.0.1 port 8080
rdr pass inet proto tcp from any to any port 443 -> 127.0.0.1 port 8443
'''
cmd = ['pfctl', '-ef', '-']
try:
run_command(args, cmd, capture=True, data=rules)
except SubprocessError:
pass # non-zero exit status on success
elif iptables:
ports = [
(80, 8080),
(443, 8443),
]
for src, dst in ports:
rule = ['-o', 'lo', '-p', 'tcp', '--dport', str(src), '-j', 'REDIRECT', '--to-port', str(dst)]
try:
# check for existing rule
cmd = ['iptables', '-t', 'nat', '-C', 'OUTPUT'] + rule
run_command(args, cmd, capture=True)
except SubprocessError:
# append rule when it does not exist
cmd = ['iptables', '-t', 'nat', '-A', 'OUTPUT'] + rule
run_command(args, cmd, capture=True)
else:
raise ApplicationError('No supported port forwarding mechanism detected.')
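# Hedged note: on a Linux host the iptables branch above ends up issuing the
# equivalent of
#     iptables -t nat -A OUTPUT -o lo -p tcp --dport 80 -j REDIRECT --to-port 8080
#     iptables -t nat -A OUTPUT -o lo -p tcp --dport 443 -j REDIRECT --to-port 8443
# after first probing with '-C' so existing rules are not duplicated.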
def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, always):
"""
:type args: IntegrationConfig
:type test_dir: str
:type target_names: list[str]
:type targets_dict: dict[str, IntegrationTarget]
:type targets_executed: set[str]
:type always: bool
"""
for target_name in target_names:
if not always and target_name in targets_executed:
continue
target = targets_dict[target_name]
if not args.explain:
# create a fresh test directory for each test target
remove_tree(test_dir)
make_dirs(test_dir)
if target.script_path:
command_integration_script(args, target)
else:
command_integration_role(args, target, None)
targets_executed.add(target_name)
def integration_environment(args, target, cmd):
"""
:type args: IntegrationConfig
:type target: IntegrationTarget
:type cmd: list[str]
:rtype: dict[str, str]
"""
env = ansible_environment(args)
if args.inject_httptester:
env.update(dict(
HTTPTESTER='1',
))
integration = dict(
JUNIT_OUTPUT_DIR=os.path.abspath('test/results/junit'),
ANSIBLE_CALLBACK_WHITELIST='junit',
ANSIBLE_TEST_CI=args.metadata.ci_provider,
)
if args.debug_strategy:
env.update(dict(ANSIBLE_STRATEGY='debug'))
if 'non_local/' in target.aliases:
if args.coverage:
display.warning('Skipping coverage reporting for non-local test: %s' % target.name)
env.update(dict(ANSIBLE_TEST_REMOTE_INTERPRETER=''))
env.update(integration)
cloud_environment = get_cloud_environment(args, target)
if cloud_environment:
cloud_environment.configure_environment(env, cmd)
return env
def command_integration_script(args, target):
"""
:type args: IntegrationConfig
:type target: IntegrationTarget
"""
display.info('Running %s integration test script' % target.name)
cmd = ['./%s' % os.path.basename(target.script_path)]
if args.verbosity:
cmd.append('-' + ('v' * args.verbosity))
env = integration_environment(args, target, cmd)
cwd = target.path
intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd)
def command_integration_role(args, target, start_at_task):
"""
:type args: IntegrationConfig
:type target: IntegrationTarget
:type start_at_task: str | None
"""
display.info('Running %s integration test role' % target.name)
vars_file = 'integration_config.yml'
if isinstance(args, WindowsIntegrationConfig):
inventory = 'inventory.winrm'
hosts = 'windows'
gather_facts = False
elif isinstance(args, NetworkIntegrationConfig):
inventory = args.inventory or 'inventory.networking'
hosts = target.name[:target.name.find('_')]
gather_facts = False
else:
inventory = 'inventory'
hosts = 'testhost'
gather_facts = True
cloud_environment = get_cloud_environment(args, target)
if cloud_environment:
hosts = cloud_environment.inventory_hosts or hosts
playbook = '''
- hosts: %s
gather_facts: %s
roles:
- { role: %s }
''' % (hosts, gather_facts, target.name)
with tempfile.NamedTemporaryFile(dir='test/integration', prefix='%s-' % target.name, suffix='.yml') as pb_fd:
pb_fd.write(playbook.encode('utf-8'))
pb_fd.flush()
filename = os.path.basename(pb_fd.name)
display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)
cmd = ['ansible-playbook', filename, '-i', inventory, '-e', '@%s' % vars_file]
if start_at_task:
cmd += ['--start-at-task', start_at_task]
if args.tags:
cmd += ['--tags', args.tags]
if args.skip_tags:
cmd += ['--skip-tags', args.skip_tags]
if args.diff:
cmd += ['--diff']
if isinstance(args, NetworkIntegrationConfig):
if args.testcase:
cmd += ['-e', 'testcase=%s' % args.testcase]
if args.verbosity:
cmd.append('-' + ('v' * args.verbosity))
env = integration_environment(args, target, cmd)
cwd = 'test/integration'
env['ANSIBLE_ROLES_PATH'] = os.path.abspath('test/integration/targets')
intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd)
def command_units(args):
"""
:type args: UnitsConfig
"""
changes = get_changes_filter(args)
require = (args.require or []) + changes
include, exclude = walk_external_targets(walk_units_targets(), args.include, args.exclude, require)
if not include:
raise AllTargetsSkipped()
if args.delegate:
raise Delegate(require=changes)
version_commands = []
for version in SUPPORTED_PYTHON_VERSIONS:
# run all versions unless version given, in which case run only that version
if args.python and version != args.python_version:
continue
if args.requirements_mode != 'skip':
install_command_requirements(args, version)
env = ansible_environment(args)
cmd = [
'pytest',
'--boxed',
'-r', 'a',
'-n', 'auto',
'--color',
'yes' if args.color else 'no',
'--junit-xml',
'test/results/junit/python%s-units.xml' % version,
]
if args.collect_only:
cmd.append('--collect-only')
if args.verbosity:
cmd.append('-' + ('v' * args.verbosity))
if exclude:
cmd += ['--ignore=%s' % target.path for target in exclude]
cmd += [target.path for target in include]
version_commands.append((version, cmd, env))
if args.requirements_mode == 'only':
sys.exit()
for version, command, env in version_commands:
display.info('Unit test with Python %s' % version)
try:
intercept_command(args, command, target_name='units', env=env, python_version=version)
except SubprocessError as ex:
# pytest exits with status code 5 when all tests are skipped, which isn't an error for our use case
if ex.status != 5:
raise
def get_changes_filter(args):
"""
:type args: TestConfig
:rtype: list[str]
"""
paths = detect_changes(args)
if not args.metadata.change_description:
if paths:
changes = categorize_changes(args, paths, args.command)
else:
changes = ChangeDescription()
args.metadata.change_description = changes
if paths is None:
return [] # change detection not enabled, do not filter targets
if not paths:
raise NoChangesDetected()
if args.metadata.change_description.targets is None:
raise NoTestsForChanges()
return args.metadata.change_description.targets
def detect_changes(args):
"""
:type args: TestConfig
:rtype: list[str] | None
"""
if args.changed and is_shippable():
display.info('Shippable detected, collecting parameters from environment.')
paths = detect_changes_shippable(args)
elif args.changed_from or args.changed_path:
paths = args.changed_path or []
if args.changed_from:
with open(args.changed_from, 'r') as changes_fd:
paths += changes_fd.read().splitlines()
elif args.changed:
paths = detect_changes_local(args)
else:
return None # change detection not enabled
if paths is None:
return None # act as though change detection not enabled, do not filter targets
display.info('Detected changes in %d file(s).' % len(paths))
for path in paths:
display.info(path, verbosity=1)
return paths
def detect_changes_shippable(args):
"""Initialize change detection on Shippable.
:type args: TestConfig
:rtype: list[str] | None
"""
git = Git(args)
result = ShippableChanges(args, git)
if result.is_pr:
job_type = 'pull request'
elif result.is_tag:
job_type = 'tag'
else:
job_type = 'merge commit'
display.info('Processing %s for branch %s commit %s' % (job_type, result.branch, result.commit))
if not args.metadata.changes:
args.metadata.populate_changes(result.diff)
return result.paths
def detect_changes_local(args):
"""
:type args: TestConfig
:rtype: list[str]
"""
git = Git(args)
result = LocalChanges(args, git)
display.info('Detected branch %s forked from %s at commit %s' % (
result.current_branch, result.fork_branch, result.fork_point))
if result.untracked and not args.untracked:
display.warning('Ignored %s untracked file(s). Use --untracked to include them.' %
len(result.untracked))
if result.committed and not args.committed:
display.warning('Ignored %s committed change(s). Omit --ignore-committed to include them.' %
len(result.committed))
if result.staged and not args.staged:
display.warning('Ignored %s staged change(s). Omit --ignore-staged to include them.' %
len(result.staged))
if result.unstaged and not args.unstaged:
display.warning('Ignored %s unstaged change(s). Omit --ignore-unstaged to include them.' %
len(result.unstaged))
names = set()
if args.tracked:
names |= set(result.tracked)
if args.untracked:
names |= set(result.untracked)
if args.committed:
names |= set(result.committed)
if args.staged:
names |= set(result.staged)
if args.unstaged:
names |= set(result.unstaged)
if not args.metadata.changes:
args.metadata.populate_changes(result.diff)
for path in result.untracked:
if is_binary_file(path):
args.metadata.changes[path] = ((0, 0),)
continue
with open(path, 'r') as source_fd:
line_count = len(source_fd.read().splitlines())
args.metadata.changes[path] = ((1, line_count),)
return sorted(names)
def get_integration_filter(args, targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:rtype: list[str]
"""
if args.tox:
# tox has the same exclusions as the local environment
return get_integration_local_filter(args, targets)
if args.docker:
return get_integration_docker_filter(args, targets)
if args.remote:
return get_integration_remote_filter(args, targets)
return get_integration_local_filter(args, targets)
def common_integration_filter(args, targets, exclude):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:type exclude: list[str]
"""
override_disabled = set(target for target in args.include if target.startswith('disabled/'))
if not args.allow_disabled:
skip = 'disabled/'
override = [target.name for target in targets if override_disabled & set(target.aliases)]
skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
if skipped:
exclude.extend(skipped)
display.warning('Excluding tests marked "%s" which require --allow-disabled or prefixing with "disabled/": %s'
% (skip.rstrip('/'), ', '.join(skipped)))
override_unsupported = set(target for target in args.include if target.startswith('unsupported/'))
if not args.allow_unsupported:
skip = 'unsupported/'
override = [target.name for target in targets if override_unsupported & set(target.aliases)]
skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
if skipped:
exclude.extend(skipped)
display.warning('Excluding tests marked "%s" which require --allow-unsupported or prefixing with "unsupported/": %s'
% (skip.rstrip('/'), ', '.join(skipped)))
override_unstable = set(target for target in args.include if target.startswith('unstable/'))
if args.allow_unstable_changed:
override_unstable |= set(args.metadata.change_description.focused_targets or [])
if not args.allow_unstable:
skip = 'unstable/'
override = [target.name for target in targets if override_unstable & set(target.aliases)]
skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
if skipped:
exclude.extend(skipped)
display.warning('Excluding tests marked "%s" which require --allow-unstable or prefixing with "unstable/": %s'
% (skip.rstrip('/'), ', '.join(skipped)))
def get_integration_local_filter(args, targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:rtype: list[str]
"""
exclude = []
common_integration_filter(args, targets, exclude)
if not args.allow_root and os.getuid() != 0:
skip = 'needs/root/'
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which require --allow-root or running as root: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
override_destructive = set(target for target in args.include if target.startswith('destructive/'))
if not args.allow_destructive:
skip = 'destructive/'
override = [target.name for target in targets if override_destructive & set(target.aliases)]
skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
if skipped:
exclude.extend(skipped)
display.warning('Excluding tests marked "%s" which require --allow-destructive or prefixing with "destructive/" to run locally: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
if args.python_version.startswith('3'):
python_version = 3
else:
python_version = 2
skip = 'skip/python%d/' % python_version
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
% (skip.rstrip('/'), python_version, ', '.join(skipped)))
return exclude
def get_integration_docker_filter(args, targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:rtype: list[str]
"""
exclude = []
common_integration_filter(args, targets, exclude)
if not args.docker_privileged:
skip = 'needs/privileged/'
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
docker_image = args.docker.split('@')[0] # strip SHA for proper tag comparison
python_version = 2 # images are expected to default to python 2 unless otherwise specified
if docker_image.endswith('py3'):
python_version = 3 # docker images ending in 'py3' are expected to default to python 3
if docker_image.endswith(':default'):
python_version = 3 # docker images tagged 'default' are expected to default to python 3
if args.python: # specifying a numeric --python option overrides the default python
if args.python.startswith('3'):
python_version = 3
elif args.python.startswith('2'):
python_version = 2
skip = 'skip/python%d/' % python_version
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
% (skip.rstrip('/'), python_version, ', '.join(skipped)))
return exclude
def get_integration_remote_filter(args, targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:rtype: list[str]
"""
parts = args.remote.split('/', 1)
platform = parts[0]
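    # e.g. a --remote value such as "rhel/7.6" would yield platform "rhel" (illustrative value only)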
exclude = []
common_integration_filter(args, targets, exclude)
skip = 'skip/%s/' % platform
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on %s: %s'
% (skip.rstrip('/'), platform, ', '.join(skipped)))
python_version = 2 # remotes are expected to default to python 2
skip = 'skip/python%d/' % python_version
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
% (skip.rstrip('/'), python_version, ', '.join(skipped)))
return exclude
class EnvironmentDescription(object):
"""Description of current running environment."""
def __init__(self, args):
"""Initialize snapshot of environment configuration.
:type args: IntegrationConfig
"""
self.args = args
if self.args.explain:
self.data = {}
return
versions = ['']
versions += SUPPORTED_PYTHON_VERSIONS
versions += list(set(v.split('.')[0] for v in SUPPORTED_PYTHON_VERSIONS))
python_paths = dict((v, find_executable('python%s' % v, required=False)) for v in sorted(versions))
python_versions = dict((v, self.get_version([python_paths[v], '-V'])) for v in sorted(python_paths) if python_paths[v])
pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions))
pip_versions = dict((v, self.get_version([pip_paths[v], '--version'])) for v in sorted(pip_paths) if pip_paths[v])
pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v])
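        # The known_hosts file is hashed so validate() can flag tests that modify ~/.ssh/known_hosts
        # (descriptive note; behavior inferred from the snapshot comparison performed in validate()).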
known_hosts_hash = self.get_hash(os.path.expanduser('~/.ssh/known_hosts'))
self.data = dict(
python_paths=python_paths,
python_versions=python_versions,
pip_paths=pip_paths,
pip_versions=pip_versions,
pip_interpreters=pip_interpreters,
known_hosts_hash=known_hosts_hash,
)
def __str__(self):
"""
:rtype: str
"""
return json.dumps(self.data, sort_keys=True, indent=4)
def validate(self, target_name, throw):
"""
:type target_name: str
:type throw: bool
:rtype: bool
"""
current = EnvironmentDescription(self.args)
original_json = str(self)
current_json = str(current)
if original_json == current_json:
return True
message = ('Test target "%s" has changed the test environment!\n'
'If these changes are necessary, they must be reverted before the test finishes.\n'
'>>> Original Environment\n'
'%s\n'
'>>> Current Environment\n'
'%s' % (target_name, original_json, current_json))
if throw:
raise ApplicationError(message)
display.error(message)
return False
@staticmethod
def get_version(command):
"""
:type command: list[str]
:rtype: str
"""
try:
stdout, stderr = raw_command(command, capture=True, cmd_verbosity=2)
except SubprocessError:
return None # all failures are equal, we don't care why it failed, only that it did
return (stdout or '').strip() + (stderr or '').strip()
@staticmethod
def get_shebang(path):
"""
:type path: str
:rtype: str
"""
with open(path) as script_fd:
return script_fd.readline()
@staticmethod
def get_hash(path):
"""
:type path: str
:rtype: str | None
"""
if not os.path.exists(path):
return None
file_hash = hashlib.md5()
with open(path, 'rb') as file_fd:
file_hash.update(file_fd.read())
return file_hash.hexdigest()
class NoChangesDetected(ApplicationWarning):
"""Exception when change detection was performed, but no changes were found."""
def __init__(self):
super(NoChangesDetected, self).__init__('No changes detected.')
class NoTestsForChanges(ApplicationWarning):
"""Exception when changes detected, but no tests trigger as a result."""
def __init__(self):
super(NoTestsForChanges, self).__init__('No tests found for detected changes.')
class Delegate(Exception):
"""Trigger command delegation."""
def __init__(self, exclude=None, require=None, integration_targets=None):
"""
:type exclude: list[str] | None
:type require: list[str] | None
:type integration_targets: tuple[IntegrationTarget] | None
"""
super(Delegate, self).__init__()
self.exclude = exclude or []
self.require = require or []
self.integration_targets = integration_targets or tuple()
class AllTargetsSkipped(ApplicationWarning):
"""All targets skipped."""
def __init__(self):
super(AllTargetsSkipped, self).__init__('All targets skipped.')
|
gpl-3.0
|
SlimSaber/android_external_chromium
|
testing/gtest/scripts/upload_gtest.py
|
1963
|
2851
|
#!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""upload_gtest.py v0.1.0 -- uploads a Google Test patch for review.
This simple wrapper passes all command line flags and
[email protected] to upload.py.
USAGE: upload_gtest.py [options for upload.py]
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import sys
CC_FLAG = '--cc='
GTEST_GROUP = '[email protected]'
def main():
# Finds the path to upload.py, assuming it is in the same directory
# as this file.
my_dir = os.path.dirname(os.path.abspath(__file__))
upload_py_path = os.path.join(my_dir, 'upload.py')
# Adds Google Test discussion group to the cc line if it's not there
# already.
upload_py_argv = [upload_py_path]
found_cc_flag = False
for arg in sys.argv[1:]:
if arg.startswith(CC_FLAG):
found_cc_flag = True
cc_line = arg[len(CC_FLAG):]
cc_list = [addr for addr in cc_line.split(',') if addr]
if GTEST_GROUP not in cc_list:
cc_list.append(GTEST_GROUP)
upload_py_argv.append(CC_FLAG + ','.join(cc_list))
else:
upload_py_argv.append(arg)
if not found_cc_flag:
upload_py_argv.append(CC_FLAG + GTEST_GROUP)
# Invokes upload.py with the modified command line flags.
os.execv(upload_py_path, upload_py_argv)
if __name__ == '__main__':
main()
|
bsd-3-clause
|
flintsoft/peacecoin
|
contrib/testgen/gen_base58_test_vectors.py
|
1064
|
4344
|
#!/usr/bin/env python
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json
gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
# key types
PUBKEY_ADDRESS = 48
SCRIPT_ADDRESS = 5
PUBKEY_ADDRESS_TEST = 111
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 176
PRIVKEY_TEST = 239
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata
# None = N/A
((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
((PRIVKEY,), 32, (), (True, False, None, False)),
((PRIVKEY,), 32, (1,), (True, False, None, True)),
((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
'''Check vector v for validity'''
result = b58decode_chk(v)
if result is None:
return False
valid = False
for template in templates:
prefix = str(bytearray(template[0]))
suffix = str(bytearray(template[2]))
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return False
def gen_valid_vectors():
'''Generate valid test vectors'''
while True:
for template in templates:
prefix = str(bytearray(template[0]))
payload = os.urandom(template[1])
suffix = str(bytearray(template[2]))
rv = b58encode_chk(prefix + payload + suffix)
assert is_valid(rv)
metadata = dict([(x,y) for (x,y) in zip(metadata_keys,template[3]) if y is not None])
yield (rv, b2a_hex(payload), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
'''Generate possibly invalid vector'''
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = str(bytearray(template[0]))
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = str(bytearray(template[2]))
return b58encode_chk(prefix + payload + suffix)
def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
yield "",
yield "x",
while True:
# kinds of invalid vectors:
# invalid prefix
# invalid payload length
# invalid (randomized) suffix (add random data)
# corrupt checksum
for template in templates:
val = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
if random.randint(0,10)<1: # line corruption
if randbool(): # add random character to end
val += random.choice(b58chars)
else: # replace random character in the middle
n = random.randint(0, len(val))
val = val[0:n] + random.choice(b58chars) + val[n+1:]
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys, json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
|
mit
|
yashaswi-reddy/icsisumm
|
icsisumm-primary-sys34_v1/nltk/nltk-0.9.2/nltk_contrib/wordnet/dbinfo_html.py
|
9
|
7200
|
# Natural Language Toolkit: Wordnet Interface: Graphical Wordnet Browser
#
# Copyright (C) 2007 - 2008 University of Pennsylvania
# Author: Jussi Salmela <[email protected]>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
import os
from collections import defaultdict
from itertools import groupby
from urllib import quote_plus
from nltk.wordnet.synset import *
import browseutil as bu
all_pos = ['noun', 'verb', 'adj', 'adv']
col_heads = ['Noun', 'Verb', 'Adjective', 'Adverb', 'Total']
data_path = os.path.join(os.environ['NLTK_DATA'], 'corpora', 'wordnet') + os.sep
display_names = [('forms','Word forms'), ('simple','--- simple words'),
('collo','--- collocations'), ('syns','Synsets'),
('w_s_pairs','Word-Sense Pairs'),
('monos','Monosemous Words and Senses'),
('poly_words','Polysemous Words'),
('poly_senses','Polysemous Senses'),
('apimw','Average Polysemy Including Monosemous Words'),
('apemw','Average Polysemy Excluding Monosemous Words'),
('rels','Relations')]
def create_db_info():
'''
Create the file: NLTK Wordnet Browser Database Info.html
'''
print 'Database information is being gathered!'
print
print 'Producing this summary may, depending on your computer,'
print 'take a couple of minutes. Please be patient!'
counts = [[0 for i in range(len(col_heads))]
for j in range(len(display_names))]
rel_counts = defaultdict(int)
rel_words = {}
unique_beginners = defaultdict(list)
for n_pos,pos in enumerate(all_pos): #['adv']): #all_pos):
print '\n\nStarting the summary for POS: %s' % col_heads[n_pos]
d = defaultdict(int)
# Word counts
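        # Each index.<pos> line is expected to look roughly like:
        #   lemma pos synset_cnt p_cnt [ptr_symbol...] sense_cnt tagsense_cnt synset_offset...
        # so ind_parts[2] below is the synset count for the lemma (format noted here for clarity).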
for ind in open(data_path + 'index.' + pos):
if ind.startswith(' '):
continue
ind_parts = ind.split()
syn_count = int(ind_parts[2])
d['w_s_pairs'] += syn_count
if syn_count == 1:
d['monos'] += 1
else:
d['poly_words'] += 1
d['poly_senses'] += syn_count
w = ind_parts[0]
d['forms'] += 1
if w.find('_') != -1:
d['simple'] += 1
else:
d['collo'] += 1
d['apimw'] = 1.0 * (d['monos'] + d['poly_senses']) / \
(d['monos'] + d['poly_words'])
d['apemw'] = 1.0 * d['poly_senses'] / d['poly_words']
# Synsets and relations
for syns in open(data_path + 'data.' + pos):
if syns.startswith(' '):
continue
d['syns'] += 1
synset = getSynset(pos,int(syns[:8]))
syn_rel = bu.relations_2(synset)
if HYPERNYM not in syn_rel and 'hypernym (instance)' not in syn_rel:
unique_beginners[n_pos].append(synset)
d['rels'] += len(syn_rel)
for sr in syn_rel:
rel_counts[(sr,n_pos)] += 1
rel_words[(sr,n_pos)] = synset.words[0]
# Prepare counts for displaying
nd = {}
for n,(x,y) in enumerate(display_names):
nd[x] = n
if x in d:
counts[n][n_pos] = d[x]
counts[n][4] += d[x]
if x == 'apimw' or x == 'apemw':
m_c = counts[nd['monos']][4]
m_ps = counts[nd['poly_senses']][4]
m_pw = counts[nd['poly_words']][4]
if x == 'apimw':
counts[n][4] = 1.0 * (m_c + m_ps) / (m_c + m_pw)
else:
counts[n][4] = 1.0 * m_ps / m_pw
# Format the counts
print '\n\nStarting the construction of result tables'
html = (bu.html_header % '* Database Info *') + \
bu._hlev(2, 'Word, synset and relation counts by POS')
html += '''
<table border="1" cellpadding="1" cellspacing="1"
summary="">
<col align="left"><col align="right"><col align="right">
<col align="right"><col align="right"><col align="right">
<tr><th></th><th align="center">Noun</th><th align="center">Verb</th>
<th align="center">Adjective</th><th align="center">Adverb</th>
<th align="center">Total</th></tr>
'''
for n,(x,y) in enumerate(display_names):
if x == 'rels':
html += '<tr><th align="left"> </th>'
html += ''.join('<td align="right"> </td>' for c in counts[n]) \
+ '</tr>\n'
html += '<tr><th align="left">' + '%s' % y + '</th>'
if x == 'apimw' or x == 'apemw':
html += ''.join('<td align="right">' + '%6.2f ' % c + '</td>' \
for c in counts[n]) + '</tr>\n'
else:
html += ''.join('<td align="right">' + '%6d ' % c + '</td>' \
for c in counts[n]) + '</tr>\n'
# Format the relation counts
r_counts = [0 for i in range(len(col_heads))]
for rk in groupby(sorted(rel_counts.keys()),key=lambda x:x[0]):
for i in range(len(col_heads)):
r_counts[i] = 0
dn = bu._dbname_to_dispname(rk[0]).split('/')
if dn[0] == '???':
dn = rk[0] + '(???)'
else:
dn = dn[0]
html += '<tr><th align="left">' + '%s' % ('--- ' + dn) + '</th>'
for y in rk[1]:
r_counts[y[1]] = rel_counts[y]
r_counts[len(col_heads) - 1] = sum(r_counts)
html += ''.join('<td align="right">' + '%6d ' % rc + '</td>'
for rc in r_counts) + '</tr>\n'
html += '</table>'
# Format the example words for relations
html += '<br><br>' + bu._hlev(2, 'Example words for relations, 1 per POS')
html += '''
<table border="1" cellpadding="1" cellspacing="1"
summary="">
<caption></caption>
<col align="center"><col align="center"><col align="center">
<col align="center"><col align="center">
<tr><th>Relation</th><th>Noun</th><th>Verb</th><th>Adjective</th><th>Adverb</th></tr>
'''
for rk in groupby(sorted(rel_counts.keys()),key=lambda x:x[0]):
dn = bu._dbname_to_dispname(rk[0]).split('/')
if dn[0] == '???':
dn = rk[0] + '(???)'
else:
dn = dn[0]
html += '<tr><th align="center">' + dn + '</th>'
rel_word_examples = [''] * 4
for y in rk[1]:
rel_word_examples[y[1]] = rel_words[y]
hlp = ''.join('<td align="center"><a href="M' + \
quote_plus(x + '#' + str(bu.uniq_cntr())) + '">' + \
x.replace('_', ' ') + '</a></td>' \
for x in rel_word_examples)
hlp = hlp.replace('<a href="M"></a>','-')
html += hlp + '</tr>\n'
html += '</table>' + bu.html_trailer
dbinfo_html_file = open('NLTK Wordnet Browser Database Info.html', 'wt')
dbinfo_html_file.write(html)
dbinfo_html_file.close()
print '\n\nCreation complete: NLTK Wordnet Browser Database Info.html'
return
if __name__ == '__main__':
create_db_info()
|
gpl-3.0
|
brandond/ansible
|
lib/ansible/modules/cloud/cloudstack/cs_snapshot_policy.py
|
14
|
10609
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2016, René Moser <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_snapshot_policy
short_description: Manages volume snapshot policies on Apache CloudStack based clouds.
description:
- Create, update and delete volume snapshot policies.
version_added: '2.2'
author: "René Moser (@resmo)"
options:
volume:
description:
- Name of the volume.
- Either C(volume) or C(vm) is required.
volume_type:
description:
- Type of the volume.
choices:
- DATADISK
- ROOT
version_added: "2.3"
vm:
description:
- Name of the instance to select the volume from.
- Use C(volume_type) if VM has a DATADISK and ROOT volume.
- In case of C(volume_type=DATADISK), additionally use C(device_id) if VM has more than one DATADISK volume.
- Either C(volume) or C(vm) is required.
version_added: "2.3"
device_id:
description:
- ID of the device on a VM the volume is attached to.
- This will only be considered if VM has multiple DATADISK volumes.
version_added: "2.3"
vpc:
description:
- Name of the vpc the instance is deployed in.
version_added: "2.3"
interval_type:
description:
- Interval of the snapshot.
default: daily
choices: [ hourly, daily, weekly, monthly ]
aliases: [ interval ]
max_snaps:
description:
- Max number of snapshots.
default: 8
aliases: [ max ]
schedule:
description:
- Time the snapshot is scheduled. Required if C(state=present).
- 'Format for C(interval_type=HOURLY): C(MM)'
- 'Format for C(interval_type=DAILY): C(MM:HH)'
- 'Format for C(interval_type=WEEKLY): C(MM:HH:DD (1-7))'
- 'Format for C(interval_type=MONTHLY): C(MM:HH:DD (1-28))'
time_zone:
description:
- Specifies a timezone for this command.
default: UTC
aliases: [ timezone ]
state:
description:
- State of the snapshot policy.
default: present
choices: [ present, absent ]
domain:
description:
- Domain the volume is related to.
account:
description:
- Account the volume is related to.
project:
description:
- Name of the project the volume is related to.
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: ensure a snapshot policy daily at 1h00 UTC
local_action:
module: cs_snapshot_policy
volume: ROOT-478
schedule: '00:1'
max_snaps: 3
- name: ensure a snapshot policy daily at 1h00 UTC on the second DATADISK of VM web-01
local_action:
module: cs_snapshot_policy
vm: web-01
volume_type: DATADISK
device_id: 2
schedule: '00:1'
max_snaps: 3
- name: ensure a snapshot policy hourly at minute 5 UTC
local_action:
module: cs_snapshot_policy
volume: ROOT-478
schedule: '5'
interval_type: hourly
max_snaps: 1
- name: ensure a snapshot policy weekly on Sunday at 05h00, TZ Europe/Zurich
local_action:
module: cs_snapshot_policy
volume: ROOT-478
schedule: '00:5:1'
interval_type: weekly
max_snaps: 1
time_zone: 'Europe/Zurich'
- name: ensure a snapshot policy is absent
local_action:
module: cs_snapshot_policy
volume: ROOT-478
interval_type: hourly
state: absent
'''
RETURN = '''
---
id:
description: UUID of the snapshot policy.
returned: success
type: str
sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f
interval_type:
description: interval type of the snapshot policy.
returned: success
type: str
sample: daily
schedule:
description: schedule of the snapshot policy.
returned: success
type: str
sample:
max_snaps:
description: maximum number of snapshots retained.
returned: success
type: int
sample: 10
time_zone:
description: the time zone of the snapshot policy.
returned: success
type: str
sample: Etc/UTC
volume:
description: the volume of the snapshot policy.
returned: success
type: str
  sample: ROOT-478
zone:
description: Name of zone the volume is related to.
returned: success
type: str
sample: ch-gva-2
project:
description: Name of project the volume is related to.
returned: success
type: str
sample: Production
account:
description: Account the volume is related to.
returned: success
type: str
sample: example account
domain:
description: Domain the volume is related to.
returned: success
type: str
sample: example domain
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together
)
class AnsibleCloudStackSnapshotPolicy(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackSnapshotPolicy, self).__init__(module)
self.returns = {
'schedule': 'schedule',
'timezone': 'time_zone',
'maxsnaps': 'max_snaps',
}
self.interval_types = {
'hourly': 0,
'daily': 1,
'weekly': 2,
'monthly': 3,
}
self.volume = None
def get_interval_type(self):
interval_type = self.module.params.get('interval_type')
return self.interval_types[interval_type]
def get_volume(self, key=None):
if self.volume:
return self._get_by_key(key, self.volume)
args = {
'name': self.module.params.get('volume'),
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id'),
'projectid': self.get_project(key='id'),
'virtualmachineid': self.get_vm(key='id', filter_zone=False),
'type': self.module.params.get('volume_type'),
}
volumes = self.query_api('listVolumes', **args)
if volumes:
if volumes['count'] > 1:
device_id = self.module.params.get('device_id')
if not device_id:
self.module.fail_json(msg="Found more then 1 volume: combine params 'vm', 'volume_type', 'device_id' and/or 'volume' to select the volume")
else:
for v in volumes['volume']:
if v.get('deviceid') == device_id:
self.volume = v
return self._get_by_key(key, self.volume)
self.module.fail_json(msg="No volume found with device id %s" % device_id)
self.volume = volumes['volume'][0]
return self._get_by_key(key, self.volume)
return None
def get_snapshot_policy(self):
args = {
'volumeid': self.get_volume(key='id')
}
policies = self.query_api('listSnapshotPolicies', **args)
if policies:
for policy in policies['snapshotpolicy']:
if policy['intervaltype'] == self.get_interval_type():
return policy
return None
def present_snapshot_policy(self):
required_params = [
'schedule',
]
self.module.fail_on_missing_params(required_params=required_params)
policy = self.get_snapshot_policy()
args = {
'id': policy.get('id') if policy else None,
'intervaltype': self.module.params.get('interval_type'),
'schedule': self.module.params.get('schedule'),
'maxsnaps': self.module.params.get('max_snaps'),
'timezone': self.module.params.get('time_zone'),
'volumeid': self.get_volume(key='id')
}
if not policy or (policy and self.has_changed(policy, args, only_keys=['schedule', 'maxsnaps', 'timezone'])):
self.result['changed'] = True
if not self.module.check_mode:
res = self.query_api('createSnapshotPolicy', **args)
policy = res['snapshotpolicy']
return policy
def absent_snapshot_policy(self):
policy = self.get_snapshot_policy()
if policy:
self.result['changed'] = True
args = {
'id': policy['id']
}
if not self.module.check_mode:
self.query_api('deleteSnapshotPolicies', **args)
return policy
def get_result(self, policy):
super(AnsibleCloudStackSnapshotPolicy, self).get_result(policy)
if policy and 'intervaltype' in policy:
for key, value in self.interval_types.items():
if value == policy['intervaltype']:
self.result['interval_type'] = key
break
volume = self.get_volume()
if volume:
volume_results = {
'volume': volume.get('name'),
'zone': volume.get('zonename'),
'project': volume.get('project'),
'account': volume.get('account'),
'domain': volume.get('domain'),
}
self.result.update(volume_results)
return self.result
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
volume=dict(),
volume_type=dict(choices=['DATADISK', 'ROOT']),
vm=dict(),
device_id=dict(type='int'),
vpc=dict(),
interval_type=dict(default='daily', choices=['hourly', 'daily', 'weekly', 'monthly'], aliases=['interval']),
schedule=dict(),
time_zone=dict(default='UTC', aliases=['timezone']),
max_snaps=dict(type='int', default=8, aliases=['max']),
state=dict(choices=['present', 'absent'], default='present'),
domain=dict(),
account=dict(),
project=dict(),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
required_one_of=(
['vm', 'volume'],
),
supports_check_mode=True
)
acs_snapshot_policy = AnsibleCloudStackSnapshotPolicy(module)
state = module.params.get('state')
if state in ['absent']:
policy = acs_snapshot_policy.absent_snapshot_policy()
else:
policy = acs_snapshot_policy.present_snapshot_policy()
result = acs_snapshot_policy.get_result(policy)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
gpl-3.0
|
abhilashnta/edx-platform
|
common/lib/xmodule/xmodule/library_content_module.py
|
34
|
24809
|
# -*- coding: utf-8 -*-
"""
LibraryContent: The XBlock used to include blocks from a library in a course.
"""
import json
from lxml import etree
from copy import copy
from capa.responsetypes import registry
from gettext import ngettext
from lazy import lazy
from .mako_module import MakoModuleDescriptor
from opaque_keys.edx.locator import LibraryLocator
import random
from webob import Response
from xblock.core import XBlock
from xblock.fields import Scope, String, List, Integer, Boolean
from xblock.fragment import Fragment
from xmodule.validation import StudioValidationMessage, StudioValidation
from xmodule.x_module import XModule, STUDENT_VIEW
from xmodule.studio_editable import StudioEditableModule, StudioEditableDescriptor
from .xml_module import XmlDescriptor
from pkg_resources import resource_string # pylint: disable=no-name-in-module
# Make '_' a no-op so we can scrape strings
_ = lambda text: text
ANY_CAPA_TYPE_VALUE = 'any'
def _get_human_name(problem_class):
"""
Get the human-friendly name for a problem type.
"""
return getattr(problem_class, 'human_name', problem_class.__name__)
def _get_capa_types():
"""
Gets capa types tags and labels
"""
capa_types = {tag: _get_human_name(registry.get_class_for_tag(tag)) for tag in registry.registered_tags()}
return [{'value': ANY_CAPA_TYPE_VALUE, 'display_name': _('Any Type')}] + sorted([
{'value': capa_type, 'display_name': caption}
for capa_type, caption in capa_types.items()
], key=lambda item: item.get('display_name'))
class LibraryContentFields(object):
"""
Fields for the LibraryContentModule.
Separated out for now because they need to be added to the module and the
descriptor.
"""
# Please note the display_name of each field below is used in
# common/test/acceptance/pages/studio/library.py:StudioLibraryContentXBlockEditModal
# to locate input elements - keep synchronized
display_name = String(
display_name=_("Display Name"),
help=_("Display name for this module"),
default="Randomized Content Block",
scope=Scope.settings,
)
source_library_id = String(
display_name=_("Library"),
help=_("Select the library from which you want to draw content."),
scope=Scope.settings,
values_provider=lambda instance: instance.source_library_values(),
)
source_library_version = String(
# This is a hidden field that stores the version of source_library when we last pulled content from it
display_name=_("Library Version"),
scope=Scope.settings,
)
mode = String(
display_name=_("Mode"),
help=_("Determines how content is drawn from the library"),
default="random",
values=[
{"display_name": _("Choose n at random"), "value": "random"}
# Future addition: Choose a new random set of n every time the student refreshes the block, for self tests
# Future addition: manually selected blocks
],
scope=Scope.settings,
)
max_count = Integer(
display_name=_("Count"),
help=_("Enter the number of components to display to each student."),
default=1,
scope=Scope.settings,
)
capa_type = String(
display_name=_("Problem Type"),
help=_('Choose a problem type to fetch from the library. If "Any Type" is selected no filtering is applied.'),
default=ANY_CAPA_TYPE_VALUE,
values=_get_capa_types(),
scope=Scope.settings,
)
filters = String(default="") # TBD
has_score = Boolean(
display_name=_("Scored"),
help=_("Set this value to True if this module is either a graded assignment or a practice problem."),
default=False,
scope=Scope.settings,
)
selected = List(
# This is a list of (block_type, block_id) tuples used to record
# which random/first set of matching blocks was selected per user
default=[],
scope=Scope.user_state,
)
has_children = True
@property
def source_library_key(self):
"""
Convenience method to get the library ID as a LibraryLocator and not just a string
"""
return LibraryLocator.from_string(self.source_library_id)
#pylint: disable=abstract-method
@XBlock.wants('library_tools') # Only needed in studio
class LibraryContentModule(LibraryContentFields, XModule, StudioEditableModule):
"""
An XBlock whose children are chosen dynamically from a content library.
Can be used to create randomized assessments among other things.
Note: technically, all matching blocks from the content library are added
as children of this block, but only a subset of those children are shown to
any particular student.
"""
def _publish_event(self, event_name, result, **kwargs):
""" Helper method to publish an event for analytics purposes """
event_data = {
"location": unicode(self.location),
"result": result,
"previous_count": getattr(self, "_last_event_result_count", len(self.selected)),
"max_count": self.max_count,
}
event_data.update(kwargs)
self.runtime.publish(self, "edx.librarycontentblock.content.{}".format(event_name), event_data)
self._last_event_result_count = len(result) # pylint: disable=attribute-defined-outside-init
def selected_children(self):
"""
Returns a set() of block_ids indicating which of the possible children
have been selected to display to the current user.
This reads and updates the "selected" field, which has user_state scope.
Note: self.selected and the return value contain block_ids. To get
actual BlockUsageLocators, it is necessary to use self.children,
because the block_ids alone do not specify the block type.
"""
if hasattr(self, "_selected_set"):
# Already done:
return self._selected_set # pylint: disable=access-member-before-definition
selected = set(tuple(k) for k in self.selected) # set of (block_type, block_id) tuples assigned to this student
lib_tools = self.runtime.service(self, 'library_tools')
format_block_keys = lambda keys: lib_tools.create_block_analytics_summary(self.location.course_key, keys)
# Determine which of our children we will show:
valid_block_keys = set([(c.block_type, c.block_id) for c in self.children]) # pylint: disable=no-member
# Remove any selected blocks that are no longer valid:
invalid_block_keys = (selected - valid_block_keys)
if invalid_block_keys:
selected -= invalid_block_keys
# Publish an event for analytics purposes:
# reason "invalid" means deleted from library or a different library is now being used.
self._publish_event(
"removed",
result=format_block_keys(selected),
removed=format_block_keys(invalid_block_keys),
reason="invalid"
)
# If max_count has been decreased, we may have to drop some previously selected blocks:
overlimit_block_keys = set()
while len(selected) > self.max_count:
overlimit_block_keys.add(selected.pop())
if overlimit_block_keys:
# Publish an event for analytics purposes:
self._publish_event(
"removed",
result=format_block_keys(selected),
removed=format_block_keys(overlimit_block_keys),
reason="overlimit"
)
# Do we have enough blocks now?
num_to_add = self.max_count - len(selected)
if num_to_add > 0:
added_block_keys = None
# We need to select [more] blocks to display to this user:
pool = valid_block_keys - selected
if self.mode == "random":
num_to_add = min(len(pool), num_to_add)
added_block_keys = set(random.sample(pool, num_to_add))
# We now have the correct n random children to show for this user.
else:
raise NotImplementedError("Unsupported mode.")
selected |= added_block_keys
if added_block_keys:
# Publish an event for analytics purposes:
self._publish_event(
"assigned",
result=format_block_keys(selected),
added=format_block_keys(added_block_keys)
)
# Save our selections to the user state, to ensure consistency:
self.selected = list(selected) # TODO: this doesn't save from the LMS "Progress" page.
# Cache the results
self._selected_set = selected # pylint: disable=attribute-defined-outside-init
return selected
def _get_selected_child_blocks(self):
"""
Generator returning XBlock instances of the children selected for the
current user.
"""
for block_type, block_id in self.selected_children():
yield self.runtime.get_block(self.location.course_key.make_usage_key(block_type, block_id))
def student_view(self, context):
fragment = Fragment()
contents = []
child_context = {} if not context else copy(context)
for child in self._get_selected_child_blocks():
for displayable in child.displayable_items():
rendered_child = displayable.render(STUDENT_VIEW, child_context)
fragment.add_frag_resources(rendered_child)
contents.append({
'id': displayable.location.to_deprecated_string(),
'content': rendered_child.content,
})
fragment.add_content(self.system.render_template('vert_module.html', {
'items': contents,
'xblock_context': context,
}))
return fragment
def validate(self):
"""
Validates the state of this Library Content Module Instance.
"""
return self.descriptor.validate()
def author_view(self, context):
"""
Renders the Studio views.
Normal studio view: If block is properly configured, displays library status summary
Studio container view: displays a preview of all possible children.
"""
fragment = Fragment()
root_xblock = context.get('root_xblock')
is_root = root_xblock and root_xblock.location == self.location
if is_root:
# User has clicked the "View" link. Show a preview of all possible children:
if self.children: # pylint: disable=no-member
fragment.add_content(self.system.render_template("library-block-author-preview-header.html", {
'max_count': self.max_count,
'display_name': self.display_name or self.url_name,
}))
context['can_edit_visibility'] = False
self.render_children(context, fragment, can_reorder=False, can_add=False)
# else: When shown on a unit page, don't show any sort of preview -
# just the status of this block in the validation area.
# The following JS is used to make the "Update now" button work on the unit page and the container view:
fragment.add_javascript_url(self.runtime.local_resource_url(self, 'public/js/library_content_edit.js'))
fragment.initialize_js('LibraryContentAuthorView')
return fragment
def get_child_descriptors(self):
"""
Return only the subset of our children relevant to the current student.
"""
return list(self._get_selected_child_blocks())
@XBlock.wants('user')
@XBlock.wants('library_tools') # Only needed in studio
@XBlock.wants('studio_user_permissions') # Only available in studio
class LibraryContentDescriptor(LibraryContentFields, MakoModuleDescriptor, XmlDescriptor, StudioEditableDescriptor):
"""
Descriptor class for LibraryContentModule XBlock.
"""
module_class = LibraryContentModule
mako_template = 'widgets/metadata-edit.html'
js = {'coffee': [resource_string(__name__, 'js/src/vertical/edit.coffee')]}
js_module_name = "VerticalDescriptor"
@property
def non_editable_metadata_fields(self):
non_editable_fields = super(LibraryContentDescriptor, self).non_editable_metadata_fields
# The only supported mode is currently 'random'.
# Add the mode field to non_editable_metadata_fields so that it doesn't
# render in the edit form.
non_editable_fields.extend([LibraryContentFields.mode, LibraryContentFields.source_library_version])
return non_editable_fields
@lazy
def tools(self):
"""
Grab the library tools service or raise an error.
"""
return self.runtime.service(self, 'library_tools')
def get_user_id(self):
"""
Get the ID of the current user.
"""
user_service = self.runtime.service(self, 'user')
if user_service:
# May be None when creating bok choy test fixtures
user_id = user_service.get_current_user().opt_attrs.get('edx-platform.user_id', None)
else:
user_id = None
return user_id
@XBlock.handler
def refresh_children(self, request=None, suffix=None): # pylint: disable=unused-argument
"""
Refresh children:
This method is to be used when any of the libraries that this block
references have been updated. It will re-fetch all matching blocks from
the libraries, and copy them as children of this block. The children
will be given new block_ids, but the definition ID used should be the
exact same definition ID used in the library.
        This method will update this block's 'source_library_version' field to store
        the version number of the libraries used, so we can easily determine if
this block is up to date or not.
"""
user_perms = self.runtime.service(self, 'studio_user_permissions')
user_id = self.get_user_id()
if not self.tools:
return Response("Library Tools unavailable in current runtime.", status=400)
self.tools.update_children(self, user_id, user_perms)
return Response()
# Copy over any overridden settings the course author may have applied to the blocks.
def _copy_overrides(self, store, user_id, source, dest):
"""
Copy any overrides the user has made on blocks in this library.
"""
for field in source.fields.itervalues():
if field.scope == Scope.settings and field.is_set_on(source):
setattr(dest, field.name, field.read_from(source))
if source.has_children:
source_children = [self.runtime.get_block(source_key) for source_key in source.children]
dest_children = [self.runtime.get_block(dest_key) for dest_key in dest.children]
for source_child, dest_child in zip(source_children, dest_children):
self._copy_overrides(store, user_id, source_child, dest_child)
store.update_item(dest, user_id)
def studio_post_duplicate(self, store, source_block):
"""
Used by the studio after basic duplication of a source block. We handle the children
ourselves, because we have to properly reference the library upstream and set the overrides.
Otherwise we'll end up losing data on the next refresh.
"""
# The first task will be to refresh our copy of the library to generate the children.
# We must do this at the currently set version of the library block. Otherwise we may not have
# exactly the same children-- someone may be duplicating an out of date block, after all.
user_id = self.get_user_id()
user_perms = self.runtime.service(self, 'studio_user_permissions')
# pylint: disable=no-member
if not self.tools:
raise RuntimeError("Library tools unavailable, duplication will not be sane!")
self.tools.update_children(self, user_id, user_perms, version=self.source_library_version)
self._copy_overrides(store, user_id, source_block, self)
# Children have been handled.
return True
def _validate_library_version(self, validation, lib_tools, version, library_key):
"""
Validates library version
"""
latest_version = lib_tools.get_library_version(library_key)
if latest_version is not None:
if version is None or version != unicode(latest_version):
validation.set_summary(
StudioValidationMessage(
StudioValidationMessage.WARNING,
_(u'This component is out of date. The library has new content.'),
# TODO: change this to action_runtime_event='...' once the unit page supports that feature.
# See https://openedx.atlassian.net/browse/TNL-993
action_class='library-update-btn',
# Translators: {refresh_icon} placeholder is substituted to "↻" (without double quotes)
action_label=_(u"{refresh_icon} Update now.").format(refresh_icon=u"↻")
)
)
return False
else:
validation.set_summary(
StudioValidationMessage(
StudioValidationMessage.ERROR,
_(u'Library is invalid, corrupt, or has been deleted.'),
action_class='edit-button',
action_label=_(u"Edit Library List.")
)
)
return False
return True
def _set_validation_error_if_empty(self, validation, summary):
""" Helper method to only set validation summary if it's empty """
if validation.empty:
validation.set_summary(summary)
def validate(self):
"""
Validates the state of this Library Content Module Instance. This
is the override of the general XBlock method, and it will also ask
its superclass to validate.
"""
validation = super(LibraryContentDescriptor, self).validate()
if not isinstance(validation, StudioValidation):
validation = StudioValidation.copy(validation)
library_tools = self.runtime.service(self, "library_tools")
if not (library_tools and library_tools.can_use_library_content(self)):
validation.set_summary(
StudioValidationMessage(
StudioValidationMessage.ERROR,
_(
u"This course does not support content libraries. "
u"Contact your system administrator for more information."
)
)
)
return validation
if not self.source_library_id:
validation.set_summary(
StudioValidationMessage(
StudioValidationMessage.NOT_CONFIGURED,
_(u"A library has not yet been selected."),
action_class='edit-button',
action_label=_(u"Select a Library.")
)
)
return validation
lib_tools = self.runtime.service(self, 'library_tools')
self._validate_library_version(validation, lib_tools, self.source_library_version, self.source_library_key)
# Note: we assume refresh_children() has been called
# since the last time fields like source_library_id or capa_types were changed.
matching_children_count = len(self.children) # pylint: disable=no-member
if matching_children_count == 0:
self._set_validation_error_if_empty(
validation,
StudioValidationMessage(
StudioValidationMessage.WARNING,
_(u'There are no matching problem types in the specified libraries.'),
action_class='edit-button',
action_label=_(u"Select another problem type.")
)
)
if matching_children_count < self.max_count:
self._set_validation_error_if_empty(
validation,
StudioValidationMessage(
StudioValidationMessage.WARNING,
(
ngettext(
u'The specified library is configured to fetch {count} problem, ',
u'The specified library is configured to fetch {count} problems, ',
self.max_count
) +
ngettext(
u'but there is only {actual} matching problem.',
u'but there are only {actual} matching problems.',
matching_children_count
)
).format(count=self.max_count, actual=matching_children_count),
action_class='edit-button',
action_label=_(u"Edit the library configuration.")
)
)
return validation
def source_library_values(self):
"""
Return a list of possible values for self.source_library_id
"""
lib_tools = self.runtime.service(self, 'library_tools')
user_perms = self.runtime.service(self, 'studio_user_permissions')
all_libraries = lib_tools.list_available_libraries()
if user_perms:
all_libraries = [
(key, name) for key, name in all_libraries
if user_perms.can_read(key) or self.source_library_id == unicode(key)
]
all_libraries.sort(key=lambda entry: entry[1]) # Sort by name
if self.source_library_id and self.source_library_key not in [entry[0] for entry in all_libraries]:
all_libraries.append((self.source_library_id, _(u"Invalid Library")))
all_libraries = [(u"", _("No Library Selected"))] + all_libraries
values = [{"display_name": name, "value": unicode(key)} for key, name in all_libraries]
return values
def editor_saved(self, user, old_metadata, old_content):
"""
If source_library_id or capa_type has been edited, refresh_children automatically.
"""
old_source_library_id = old_metadata.get('source_library_id', [])
if (old_source_library_id != self.source_library_id or
old_metadata.get('capa_type', ANY_CAPA_TYPE_VALUE) != self.capa_type):
try:
self.refresh_children()
except ValueError:
pass # The validation area will display an error message, no need to do anything now.
def has_dynamic_children(self):
"""
Inform the runtime that our children vary per-user.
See get_child_descriptors() above
"""
return True
def get_content_titles(self):
"""
Returns list of friendly titles for our selected children only; without
        this, all possible children's titles would be seen in the sequence bar in
the LMS.
This overwrites the get_content_titles method included in x_module by default.
"""
titles = []
for child in self._xmodule.get_child_descriptors():
titles.extend(child.get_content_titles())
return titles
@classmethod
def definition_from_xml(cls, xml_object, system):
children = [
# pylint: disable=no-member
system.process_xml(etree.tostring(child)).scope_ids.usage_id
for child in xml_object.getchildren()
]
definition = {
attr_name: json.loads(attr_value)
            for attr_name, attr_value in xml_object.attrib.items()
}
return definition, children
def definition_to_xml(self, resource_fs):
""" Exports Library Content Module to XML """
# pylint: disable=no-member
xml_object = etree.Element('library_content')
for child in self.get_children():
self.runtime.add_block_as_child_node(child, xml_object)
# Set node attributes based on our fields.
for field_name, field in self.fields.iteritems():
if field_name in ('children', 'parent', 'content'):
continue
if field.is_set_on(self):
xml_object.set(field_name, unicode(field.read_from(self)))
return xml_object
|
agpl-3.0
|
mkrupcale/ansible
|
lib/ansible/modules/cloud/vmware/vmware_portgroup.py
|
32
|
5724
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: vmware_portgroup
short_description: Create a VMware portgroup
description:
- Create a VMware portgroup
version_added: 2.0
author: "Joseph Callen (@jcpowermac), Russell Teague (@mtnbikenc)"
notes:
- Tested on vSphere 5.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
switch_name:
description:
- vSwitch to modify
required: True
portgroup_name:
description:
- Portgroup name to add
required: True
vlan_id:
description:
- VLAN ID to assign to portgroup
required: True
network_policy:
description:
- Network policy specifies layer 2 security settings for a
portgroup such as promiscuous mode, where guest adapter listens
to all the packets, MAC address changes and forged transmits.
Settings are promiscuous_mode, forged_transmits, mac_changes
required: False
version_added: "2.2"
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
Example from Ansible playbook
- name: Add Management Network VM Portgroup
local_action:
module: vmware_portgroup
hostname: esxi_hostname
username: esxi_username
password: esxi_password
switch_name: vswitch_name
portgroup_name: portgroup_name
vlan_id: vlan_id
- name: Add Portgroup with Promiscuous Mode Enabled
local_action:
module: vmware_portgroup
hostname: esxi_hostname
username: esxi_username
password: esxi_password
switch_name: vswitch_name
portgroup_name: portgroup_name
network_policy:
promiscuous_mode: True
'''
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
def create_network_policy(promiscuous_mode, forged_transmits, mac_changes):
security_policy = vim.host.NetworkPolicy.SecurityPolicy()
if promiscuous_mode:
security_policy.allowPromiscuous = promiscuous_mode
if forged_transmits:
security_policy.forgedTransmits = forged_transmits
if mac_changes:
security_policy.macChanges = mac_changes
network_policy = vim.host.NetworkPolicy(security=security_policy)
return network_policy
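# Builds a one-entry host NetworkConfig (portgroup changeOperation "add") and applies it with
# UpdateNetworkConfig(config, "modify"); returns True unconditionally, relying on pyVmomi to raise on failure.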
def create_port_group(host_system, portgroup_name, vlan_id, vswitch_name, network_policy):
config = vim.host.NetworkConfig()
config.portgroup = [vim.host.PortGroup.Config()]
config.portgroup[0].changeOperation = "add"
config.portgroup[0].spec = vim.host.PortGroup.Specification()
config.portgroup[0].spec.name = portgroup_name
config.portgroup[0].spec.vlanId = vlan_id
config.portgroup[0].spec.vswitchName = vswitch_name
config.portgroup[0].spec.policy = network_policy
host_network_config_result = host_system.configManager.networkSystem.UpdateNetworkConfig(config, "modify")
return True
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(portgroup_name=dict(required=True, type='str'),
switch_name=dict(required=True, type='str'),
vlan_id=dict(required=True, type='int'),
network_policy=dict(required=False, type='dict', default={})))
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi is required for this module')
portgroup_name = module.params['portgroup_name']
switch_name = module.params['switch_name']
vlan_id = module.params['vlan_id']
promiscuous_mode = module.params['network_policy'].get('promiscuous_mode', None)
forged_transmits = module.params['network_policy'].get('forged_transmits', None)
mac_changes = module.params['network_policy'].get('mac_changes', None)
try:
content = connect_to_api(module)
host = get_all_objs(content, [vim.HostSystem])
if not host:
            module.fail_json(msg="Unable to locate Physical Host.")
host_system = host.keys()[0]
if find_host_portgroup_by_name(host_system, portgroup_name):
module.exit_json(changed=False)
network_policy = create_network_policy(promiscuous_mode, forged_transmits, mac_changes)
changed = create_port_group(host_system, portgroup_name, vlan_id, switch_name, network_policy)
module.exit_json(changed=changed)
except vmodl.RuntimeFault as runtime_fault:
module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
module.fail_json(msg=method_fault.msg)
except Exception as e:
module.fail_json(msg=str(e))
from ansible.module_utils.vmware import *
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
deokjinkim/servo
|
tests/wpt/css-tests/tools/six/test_six.py
|
418
|
22226
|
import operator
import sys
import types
import py
import six
def test_add_doc():
def f():
"""Icky doc"""
pass
six._add_doc(f, """New doc""")
assert f.__doc__ == "New doc"
def test_import_module():
from logging import handlers
m = six._import_module("logging.handlers")
assert m is handlers
def test_integer_types():
assert isinstance(1, six.integer_types)
assert isinstance(-1, six.integer_types)
assert isinstance(six.MAXSIZE + 23, six.integer_types)
assert not isinstance(.1, six.integer_types)
def test_string_types():
assert isinstance("hi", six.string_types)
assert isinstance(six.u("hi"), six.string_types)
assert issubclass(six.text_type, six.string_types)
def test_class_types():
class X:
pass
class Y(object):
pass
assert isinstance(X, six.class_types)
assert isinstance(Y, six.class_types)
assert not isinstance(X(), six.class_types)
def test_text_type():
assert type(six.u("hi")) is six.text_type
def test_binary_type():
assert type(six.b("hi")) is six.binary_type
def test_MAXSIZE():
try:
# This shouldn't raise an overflow error.
six.MAXSIZE.__index__()
except AttributeError:
# Before Python 2.6.
pass
py.test.raises(
(ValueError, OverflowError),
operator.mul, [None], six.MAXSIZE + 1)
def test_lazy():
if six.PY3:
html_name = "html.parser"
else:
html_name = "HTMLParser"
assert html_name not in sys.modules
mod = six.moves.html_parser
assert sys.modules[html_name] is mod
assert "htmlparser" not in six._MovedItems.__dict__
try:
import _tkinter
except ImportError:
have_tkinter = False
else:
have_tkinter = True
have_gdbm = True
try:
import gdbm
except ImportError:
try:
import dbm.gnu
except ImportError:
have_gdbm = False
@py.test.mark.parametrize("item_name",
[item.name for item in six._moved_attributes])
def test_move_items(item_name):
"""Ensure that everything loads correctly."""
try:
item = getattr(six.moves, item_name)
if isinstance(item, types.ModuleType):
__import__("six.moves." + item_name)
except AttributeError:
if item_name == "zip_longest" and sys.version_info < (2, 6):
py.test.skip("zip_longest only available on 2.6+")
except ImportError:
if item_name == "winreg" and not sys.platform.startswith("win"):
py.test.skip("Windows only module")
if item_name.startswith("tkinter"):
if not have_tkinter:
py.test.skip("requires tkinter")
if item_name == "tkinter_ttk" and sys.version_info[:2] <= (2, 6):
py.test.skip("ttk only available on 2.7+")
if item_name.startswith("dbm_gnu") and not have_gdbm:
py.test.skip("requires gdbm")
raise
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_parse_moved_attributes])
def test_move_items_urllib_parse(item_name):
"""Ensure that everything loads correctly."""
if item_name == "ParseResult" and sys.version_info < (2, 5):
py.test.skip("ParseResult is only found on 2.5+")
if item_name in ("parse_qs", "parse_qsl") and sys.version_info < (2, 6):
py.test.skip("parse_qs[l] is new in 2.6")
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.parse)
getattr(six.moves.urllib.parse, item_name)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_error_moved_attributes])
def test_move_items_urllib_error(item_name):
"""Ensure that everything loads correctly."""
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.error)
getattr(six.moves.urllib.error, item_name)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_request_moved_attributes])
def test_move_items_urllib_request(item_name):
"""Ensure that everything loads correctly."""
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.request)
getattr(six.moves.urllib.request, item_name)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_response_moved_attributes])
def test_move_items_urllib_response(item_name):
"""Ensure that everything loads correctly."""
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.response)
getattr(six.moves.urllib.response, item_name)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_robotparser_moved_attributes])
def test_move_items_urllib_robotparser(item_name):
"""Ensure that everything loads correctly."""
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.robotparser)
getattr(six.moves.urllib.robotparser, item_name)
def test_import_moves_error_1():
from six.moves.urllib.parse import urljoin
from six import moves
# In 1.4.1: AttributeError: 'Module_six_moves_urllib_parse' object has no attribute 'urljoin'
assert moves.urllib.parse.urljoin
def test_import_moves_error_2():
from six import moves
assert moves.urllib.parse.urljoin
# In 1.4.1: ImportError: cannot import name urljoin
from six.moves.urllib.parse import urljoin
def test_import_moves_error_3():
from six.moves.urllib.parse import urljoin
# In 1.4.1: ImportError: cannot import name urljoin
from six.moves.urllib_parse import urljoin
def test_from_imports():
from six.moves.queue import Queue
assert isinstance(Queue, six.class_types)
from six.moves.configparser import ConfigParser
assert isinstance(ConfigParser, six.class_types)
def test_filter():
from six.moves import filter
f = filter(lambda x: x % 2, range(10))
assert six.advance_iterator(f) == 1
def test_filter_false():
from six.moves import filterfalse
f = filterfalse(lambda x: x % 3, range(10))
assert six.advance_iterator(f) == 0
assert six.advance_iterator(f) == 3
assert six.advance_iterator(f) == 6
def test_map():
from six.moves import map
assert six.advance_iterator(map(lambda x: x + 1, range(2))) == 1
def test_zip():
from six.moves import zip
assert six.advance_iterator(zip(range(2), range(2))) == (0, 0)
@py.test.mark.skipif("sys.version_info < (2, 6)")
def test_zip_longest():
from six.moves import zip_longest
it = zip_longest(range(2), range(1))
assert six.advance_iterator(it) == (0, 0)
assert six.advance_iterator(it) == (1, None)
class TestCustomizedMoves:
def teardown_method(self, meth):
try:
del six._MovedItems.spam
except AttributeError:
pass
try:
del six.moves.__dict__["spam"]
except KeyError:
pass
def test_moved_attribute(self):
attr = six.MovedAttribute("spam", "foo", "bar")
if six.PY3:
assert attr.mod == "bar"
else:
assert attr.mod == "foo"
assert attr.attr == "spam"
attr = six.MovedAttribute("spam", "foo", "bar", "lemma")
assert attr.attr == "lemma"
attr = six.MovedAttribute("spam", "foo", "bar", "lemma", "theorm")
if six.PY3:
assert attr.attr == "theorm"
else:
assert attr.attr == "lemma"
def test_moved_module(self):
attr = six.MovedModule("spam", "foo")
if six.PY3:
assert attr.mod == "spam"
else:
assert attr.mod == "foo"
attr = six.MovedModule("spam", "foo", "bar")
if six.PY3:
assert attr.mod == "bar"
else:
assert attr.mod == "foo"
def test_custom_move_module(self):
attr = six.MovedModule("spam", "six", "six")
six.add_move(attr)
six.remove_move("spam")
assert not hasattr(six.moves, "spam")
attr = six.MovedModule("spam", "six", "six")
six.add_move(attr)
from six.moves import spam
assert spam is six
six.remove_move("spam")
assert not hasattr(six.moves, "spam")
def test_custom_move_attribute(self):
attr = six.MovedAttribute("spam", "six", "six", "u", "u")
six.add_move(attr)
six.remove_move("spam")
assert not hasattr(six.moves, "spam")
attr = six.MovedAttribute("spam", "six", "six", "u", "u")
six.add_move(attr)
from six.moves import spam
assert spam is six.u
six.remove_move("spam")
assert not hasattr(six.moves, "spam")
def test_empty_remove(self):
py.test.raises(AttributeError, six.remove_move, "eggs")
def test_get_unbound_function():
class X(object):
def m(self):
pass
assert six.get_unbound_function(X.m) is X.__dict__["m"]
def test_get_method_self():
class X(object):
def m(self):
pass
x = X()
assert six.get_method_self(x.m) is x
py.test.raises(AttributeError, six.get_method_self, 42)
def test_get_method_function():
class X(object):
def m(self):
pass
x = X()
assert six.get_method_function(x.m) is X.__dict__["m"]
py.test.raises(AttributeError, six.get_method_function, hasattr)
def test_get_function_closure():
def f():
x = 42
def g():
return x
return g
cell = six.get_function_closure(f())[0]
assert type(cell).__name__ == "cell"
def test_get_function_code():
def f():
pass
assert isinstance(six.get_function_code(f), types.CodeType)
if not hasattr(sys, "pypy_version_info"):
py.test.raises(AttributeError, six.get_function_code, hasattr)
def test_get_function_defaults():
def f(x, y=3, b=4):
pass
assert six.get_function_defaults(f) == (3, 4)
def test_get_function_globals():
def f():
pass
assert six.get_function_globals(f) is globals()
def test_dictionary_iterators(monkeypatch):
def stock_method_name(iterwhat):
"""Given a method suffix like "lists" or "values", return the name
of the dict method that delivers those on the version of Python
we're running in."""
if six.PY3:
return iterwhat
return 'iter' + iterwhat
class MyDict(dict):
if not six.PY3:
def lists(self, **kw):
return [1, 2, 3]
def iterlists(self, **kw):
return iter([1, 2, 3])
f = MyDict.iterlists
del MyDict.iterlists
setattr(MyDict, stock_method_name('lists'), f)
d = MyDict(zip(range(10), reversed(range(10))))
for name in "keys", "values", "items", "lists":
meth = getattr(six, "iter" + name)
it = meth(d)
assert not isinstance(it, list)
assert list(it) == list(getattr(d, name)())
py.test.raises(StopIteration, six.advance_iterator, it)
record = []
def with_kw(*args, **kw):
record.append(kw["kw"])
return old(*args)
old = getattr(MyDict, stock_method_name(name))
monkeypatch.setattr(MyDict, stock_method_name(name), with_kw)
meth(d, kw=42)
assert record == [42]
monkeypatch.undo()
@py.test.mark.skipif(sys.version_info[:2] < (2, 7),
reason="view methods on dictionaries only available on 2.7+")
def test_dictionary_views():
def stock_method_name(viewwhat):
"""Given a method suffix like "keys" or "values", return the name
of the dict method that delivers those on the version of Python
we're running in."""
if six.PY3:
return viewwhat
return 'view' + viewwhat
d = dict(zip(range(10), (range(11, 20))))
for name in "keys", "values", "items":
meth = getattr(six, "view" + name)
view = meth(d)
assert set(view) == set(getattr(d, name)())
def test_advance_iterator():
assert six.next is six.advance_iterator
l = [1, 2]
it = iter(l)
assert six.next(it) == 1
assert six.next(it) == 2
py.test.raises(StopIteration, six.next, it)
py.test.raises(StopIteration, six.next, it)
def test_iterator():
class myiter(six.Iterator):
def __next__(self):
return 13
assert six.advance_iterator(myiter()) == 13
class myitersub(myiter):
def __next__(self):
return 14
assert six.advance_iterator(myitersub()) == 14
def test_callable():
class X:
def __call__(self):
pass
def method(self):
pass
assert six.callable(X)
assert six.callable(X())
assert six.callable(test_callable)
assert six.callable(hasattr)
assert six.callable(X.method)
assert six.callable(X().method)
assert not six.callable(4)
assert not six.callable("string")
def test_create_bound_method():
class X(object):
pass
def f(self):
return self
x = X()
b = six.create_bound_method(f, x)
assert isinstance(b, types.MethodType)
assert b() is x
if six.PY3:
def test_b():
data = six.b("\xff")
assert isinstance(data, bytes)
assert len(data) == 1
assert data == bytes([255])
def test_u():
s = six.u("hi \u0439 \U00000439 \\ \\\\ \n")
assert isinstance(s, str)
assert s == "hi \u0439 \U00000439 \\ \\\\ \n"
else:
def test_b():
data = six.b("\xff")
assert isinstance(data, str)
assert len(data) == 1
assert data == "\xff"
def test_u():
s = six.u("hi \u0439 \U00000439 \\ \\\\ \n")
assert isinstance(s, unicode)
assert s == "hi \xd0\xb9 \xd0\xb9 \\ \\\\ \n".decode("utf8")
def test_u_escapes():
s = six.u("\u1234")
assert len(s) == 1
def test_unichr():
assert six.u("\u1234") == six.unichr(0x1234)
assert type(six.u("\u1234")) is type(six.unichr(0x1234))
def test_int2byte():
assert six.int2byte(3) == six.b("\x03")
py.test.raises((OverflowError, ValueError), six.int2byte, 256)
def test_byte2int():
assert six.byte2int(six.b("\x03")) == 3
assert six.byte2int(six.b("\x03\x04")) == 3
py.test.raises(IndexError, six.byte2int, six.b(""))
def test_bytesindex():
assert six.indexbytes(six.b("hello"), 3) == ord("l")
def test_bytesiter():
it = six.iterbytes(six.b("hi"))
assert six.next(it) == ord("h")
assert six.next(it) == ord("i")
py.test.raises(StopIteration, six.next, it)
def test_StringIO():
fp = six.StringIO()
fp.write(six.u("hello"))
assert fp.getvalue() == six.u("hello")
def test_BytesIO():
fp = six.BytesIO()
fp.write(six.b("hello"))
assert fp.getvalue() == six.b("hello")
def test_exec_():
def f():
l = []
six.exec_("l.append(1)")
assert l == [1]
f()
ns = {}
six.exec_("x = 42", ns)
assert ns["x"] == 42
glob = {}
loc = {}
six.exec_("global y; y = 42; x = 12", glob, loc)
assert glob["y"] == 42
assert "x" not in glob
assert loc["x"] == 12
assert "y" not in loc
def test_reraise():
def get_next(tb):
if six.PY3:
return tb.tb_next.tb_next
else:
return tb.tb_next
e = Exception("blah")
try:
raise e
except Exception:
tp, val, tb = sys.exc_info()
try:
six.reraise(tp, val, tb)
except Exception:
tp2, value2, tb2 = sys.exc_info()
assert tp2 is Exception
assert value2 is e
assert tb is get_next(tb2)
try:
six.reraise(tp, val)
except Exception:
tp2, value2, tb2 = sys.exc_info()
assert tp2 is Exception
assert value2 is e
assert tb2 is not tb
try:
six.reraise(tp, val, tb2)
except Exception:
tp2, value2, tb3 = sys.exc_info()
assert tp2 is Exception
assert value2 is e
assert get_next(tb3) is tb2
try:
six.reraise(tp, None, tb)
except Exception:
tp2, value2, tb2 = sys.exc_info()
assert tp2 is Exception
assert value2 is not val
assert isinstance(value2, Exception)
assert tb is get_next(tb2)
def test_raise_from():
try:
try:
raise Exception("blah")
except Exception:
ctx = sys.exc_info()[1]
f = Exception("foo")
six.raise_from(f, None)
except Exception:
tp, val, tb = sys.exc_info()
if sys.version_info[:2] > (3, 0):
# We should have done a raise f from None equivalent.
assert val.__cause__ is None
assert val.__context__ is ctx
if sys.version_info[:2] >= (3, 3):
# And that should suppress the context on the exception.
assert val.__suppress_context__
# For all versions the outer exception should have raised successfully.
assert str(val) == "foo"
def test_print_():
save = sys.stdout
out = sys.stdout = six.moves.StringIO()
try:
six.print_("Hello,", "person!")
finally:
sys.stdout = save
assert out.getvalue() == "Hello, person!\n"
out = six.StringIO()
six.print_("Hello,", "person!", file=out)
assert out.getvalue() == "Hello, person!\n"
out = six.StringIO()
six.print_("Hello,", "person!", file=out, end="")
assert out.getvalue() == "Hello, person!"
out = six.StringIO()
six.print_("Hello,", "person!", file=out, sep="X")
assert out.getvalue() == "Hello,Xperson!\n"
out = six.StringIO()
six.print_(six.u("Hello,"), six.u("person!"), file=out)
result = out.getvalue()
assert isinstance(result, six.text_type)
assert result == six.u("Hello, person!\n")
six.print_("Hello", file=None) # This works.
out = six.StringIO()
six.print_(None, file=out)
assert out.getvalue() == "None\n"
@py.test.mark.skipif("sys.version_info[:2] >= (2, 6)")
def test_print_encoding(monkeypatch):
# Fool the type checking in print_.
monkeypatch.setattr(six, "file", six.BytesIO, raising=False)
out = six.BytesIO()
out.encoding = "utf-8"
out.errors = None
six.print_(six.u("\u053c"), end="", file=out)
assert out.getvalue() == six.b("\xd4\xbc")
out = six.BytesIO()
out.encoding = "ascii"
out.errors = "strict"
py.test.raises(UnicodeEncodeError, six.print_, six.u("\u053c"), file=out)
out.errors = "backslashreplace"
six.print_(six.u("\u053c"), end="", file=out)
assert out.getvalue() == six.b("\\u053c")
def test_print_exceptions():
py.test.raises(TypeError, six.print_, x=3)
py.test.raises(TypeError, six.print_, end=3)
py.test.raises(TypeError, six.print_, sep=42)
def test_with_metaclass():
class Meta(type):
pass
class X(six.with_metaclass(Meta)):
pass
assert type(X) is Meta
assert issubclass(X, object)
class Base(object):
pass
class X(six.with_metaclass(Meta, Base)):
pass
assert type(X) is Meta
assert issubclass(X, Base)
class Base2(object):
pass
class X(six.with_metaclass(Meta, Base, Base2)):
pass
assert type(X) is Meta
assert issubclass(X, Base)
assert issubclass(X, Base2)
assert X.__mro__ == (X, Base, Base2, object)
def test_wraps():
def f(g):
@six.wraps(g)
def w():
return 42
return w
def k():
pass
original_k = k
k = f(f(k))
assert hasattr(k, '__wrapped__')
k = k.__wrapped__
assert hasattr(k, '__wrapped__')
k = k.__wrapped__
assert k is original_k
assert not hasattr(k, '__wrapped__')
def f(g, assign, update):
def w():
return 42
w.glue = {"foo" : "bar"}
return six.wraps(g, assign, update)(w)
k.glue = {"melon" : "egg"}
k.turnip = 43
k = f(k, ["turnip"], ["glue"])
assert k.__name__ == "w"
assert k.turnip == 43
assert k.glue == {"melon" : "egg", "foo" : "bar"}
def test_add_metaclass():
class Meta(type):
pass
class X:
"success"
X = six.add_metaclass(Meta)(X)
assert type(X) is Meta
assert issubclass(X, object)
assert X.__module__ == __name__
assert X.__doc__ == "success"
class Base(object):
pass
class X(Base):
pass
X = six.add_metaclass(Meta)(X)
assert type(X) is Meta
assert issubclass(X, Base)
class Base2(object):
pass
class X(Base, Base2):
pass
X = six.add_metaclass(Meta)(X)
assert type(X) is Meta
assert issubclass(X, Base)
assert issubclass(X, Base2)
# Test a second-generation subclass of a type.
class Meta1(type):
m1 = "m1"
class Meta2(Meta1):
m2 = "m2"
class Base:
b = "b"
Base = six.add_metaclass(Meta1)(Base)
class X(Base):
x = "x"
X = six.add_metaclass(Meta2)(X)
assert type(X) is Meta2
assert issubclass(X, Base)
assert type(Base) is Meta1
assert "__dict__" not in vars(X)
instance = X()
instance.attr = "test"
assert vars(instance) == {"attr": "test"}
assert instance.b == Base.b
assert instance.x == X.x
# Test a class with slots.
class MySlots(object):
__slots__ = ["a", "b"]
MySlots = six.add_metaclass(Meta1)(MySlots)
assert MySlots.__slots__ == ["a", "b"]
instance = MySlots()
instance.a = "foo"
py.test.raises(AttributeError, setattr, instance, "c", "baz")
# Test a class with string for slots.
class MyStringSlots(object):
__slots__ = "ab"
MyStringSlots = six.add_metaclass(Meta1)(MyStringSlots)
assert MyStringSlots.__slots__ == "ab"
instance = MyStringSlots()
instance.ab = "foo"
py.test.raises(AttributeError, setattr, instance, "a", "baz")
py.test.raises(AttributeError, setattr, instance, "b", "baz")
class MySlotsWeakref(object):
__slots__ = "__weakref__",
MySlotsWeakref = six.add_metaclass(Meta)(MySlotsWeakref)
assert type(MySlotsWeakref) is Meta
|
mpl-2.0
|
splunk/splunk-webframework
|
server/splunkdj/management/commands/wsgiserver/mediahandler.py
|
1
|
3038
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# fix these up
import os, stat, mimetypes
import django
from django.utils.http import http_date
from django.conf import settings
from django.contrib.staticfiles import finders
import logging
logger = logging.getLogger('splunk')
class BlockIterator(object):
# Vlada Macek Says:
# September 29th, 2009 at 14:42
# You’re handing the static files by
# output = [fp.read()]
# fp.close()
# which causes entire content is loaded to the memory (and not only once
# by my observation). This is unacceptable for large files. I found this
# to be much more memory & CPU efficient:
def __init__(self, fp):
self.fp = fp
def __iter__(self):
return self
def next(self):
chunk = self.fp.read(20*1024)
if chunk:
return chunk
self.fp.close()
raise StopIteration
class MediaHandler( object ):
def __init__( self, media_root ):
self.media_root = media_root
def __call__( self, environ, start_response ):
def done( status, headers, output ):
start_response( status, headers.items() )
return output
path_info = environ['PATH_INFO']
if path_info[0] == '/':
path_info = path_info[1:]
# this is the thing I'm not sure is secure, can we prevent
        # going up out of media root?
file_path = os.path.join( self.media_root, path_info )
if not os.path.exists( file_path ):
file_path = finders.find(path_info)
if not file_path or not os.path.exists( file_path ):
status = '404 NOT FOUND'
headers = {'Content-type': 'text/plain'}
output = ['Page not found: %s' % path_info]
return done( status, headers, output )
try:
fp = open( file_path, 'rb' )
except IOError, e:
status = '401 UNAUTHORIZED'
headers = {'Content-type': 'text/plain'}
output = ['Permission denied: %s' % file_path]
return done( status, headers, output )
# This is a very simple implementation of conditional GET with
# the Last-Modified header. It makes media files a bit speedier
# because the files are only read off disk for the first request
# (assuming the browser/client supports conditional GET).
mtime = str(http_date( os.stat(file_path)[stat.ST_MTIME] ))
headers = {'Last-Modified': mtime}
if environ.get('HTTP_IF_MODIFIED_SINCE', None) == mtime:
status = '304 NOT MODIFIED'
output = []
else:
status = '200 OK'
mime_type = mimetypes.guess_type(file_path)[0]
if mime_type:
headers['Content-Type'] = mime_type
#output = [fp.read()]
# fp.close()
output = BlockIterator(fp)
return done( status, headers, output )
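# Illustrative usage sketch (not part of the original handler; the media root path
# below is a placeholder): the class is a plain WSGI callable, so it can be mounted
# directly, e.g. with wsgiref.
# >>> from wsgiref.simple_server import make_server
# >>> app = MediaHandler('/srv/example/static')
# >>> make_server('127.0.0.1', 8000, app).serve_forever()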
|
apache-2.0
|
rcharp/toyota-flask
|
venv/lib/python2.7/site-packages/requests/packages/chardet/langbulgarianmodel.py
|
2965
|
12784
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
# this table is modified based on win1251BulgarianCharToOrderMap, so
# only numbers <64 are surely valid
Latin5_BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
)
win1251BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 96.9392%
# first 1024 sequences: 3.0618%
# rest sequences: 0.2992%
# negative sequences: 0.0020%
BulgarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
)
Latin5BulgarianModel = {
'charToOrderMap': Latin5_BulgarianCharToOrderMap,
'precedenceMatrix': BulgarianLangModel,
'mTypicalPositiveRatio': 0.969392,
'keepEnglishLetter': False,
'charsetName': "ISO-8859-5"
}
Win1251BulgarianModel = {
'charToOrderMap': win1251BulgarianCharToOrderMap,
'precedenceMatrix': BulgarianLangModel,
'mTypicalPositiveRatio': 0.969392,
'keepEnglishLetter': False,
'charsetName': "windows-1251"
}
# flake8: noqa
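# Illustrative sketch (assumes the chardet 2.x package layout this file ships in;
# not part of the original module): the model dicts above are consumed by the
# single-byte charset prober, roughly as the group prober does.
# >>> from .sbcharsetprober import SingleByteCharSetProber
# >>> prober = SingleByteCharSetProber(Win1251BulgarianModel)
# >>> prober.feed(cyrillic_bytes)          # cyrillic_bytes is a placeholder
# >>> confidence = prober.get_confidence()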
|
apache-2.0
|
GoogleChrome/big-rig
|
app/src/thirdparty/telemetry/internal/actions/mouse_click_unittest.py
|
31
|
1617
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core import exceptions
from telemetry.internal.actions import mouse_click
from telemetry.testing import tab_test_case
class MouseClickActionTest(tab_test_case.TabTestCase):
def testMouseClickAction(self):
self.Navigate('blank.html')
self._tab.ExecuteJavaScript("""
(function() {
function createElement(id, textContent) {
var el = document.createElement("div");
el.id = id;
el.textContent = textContent;
document.body.appendChild(el);
}
createElement('test-1', 'foo');
})();""")
i = mouse_click.MouseClickAction(selector='#test-1')
i.WillRunAction(self._tab)
i.RunAction(self._tab)
self.assertTrue(self._tab.EvaluateJavaScript(
'window.__mouseClickActionDone'))
def testMouseClickActionOnNonExistingElement(self):
self.Navigate('blank.html')
self._tab.ExecuteJavaScript("""
(function() {
function createElement(id, textContent) {
var el = document.createElement("div");
el.id = id;
el.textContent = textContent;
document.body.appendChild(el);
}
createElement('test-1', 'foo');
})();""")
i = mouse_click.MouseClickAction(selector='#test-2')
i.WillRunAction(self._tab)
def WillFail():
i.RunAction(self._tab)
self.assertRaises(exceptions.EvaluateException, WillFail)
|
apache-2.0
|
guschmue/tensorflow
|
tensorflow/contrib/bayesflow/python/kernel_tests/layers_dense_variational_test.py
|
5
|
11390
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for dense Bayesian layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.bayesflow.python.ops import layers_dense_variational_impl as prob_layers_lib
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import normal as normal_lib
from tensorflow.python.platform import test
class Counter(object):
"""Helper class to manage incrementing a counting `int`."""
def __init__(self):
self._value = -1
@property
def value(self):
return self._value
def __call__(self):
self._value += 1
return self._value
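# Illustrative note (not part of the original test): each call advances the counter,
# which is why it doubles as a deterministic seed source in the tests below.
# >>> seed = Counter()
# >>> seed(), seed(), seed()
# (0, 1, 2)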
class MockDistribution(normal_lib.Normal):
"""Monitors DenseVariational calls to the underlying distribution."""
def __init__(self, result_sample, result_log_prob, loc=None, scale=None):
self.result_sample = result_sample
self.result_log_prob = result_log_prob
self.result_loc = loc
self.result_scale = scale
self.called_log_prob = Counter()
self.called_sample = Counter()
self.called_loc = Counter()
self.called_scale = Counter()
def log_prob(self, *args, **kwargs):
self.called_log_prob()
return self.result_log_prob
def sample(self, *args, **kwargs):
self.called_sample()
return self.result_sample
@property
def loc(self):
self.called_loc()
return self.result_loc
@property
def scale(self):
self.called_scale()
return self.result_scale
class MockKLDivergence(object):
"""Monitors DenseVariational calls to the divergence implementation."""
def __init__(self, result):
self.result = result
self.args = []
self.called = Counter()
def __call__(self, *args, **kwargs):
self.called()
self.args.append(args)
return self.result
class DenseVariationalLocalReparametrization(test.TestCase):
def testKLPenaltyKernel(self):
with self.test_session():
dense_vi = prob_layers_lib.DenseVariational(units=2)
inputs = random_ops.random_uniform([2, 3], seed=1)
# No keys.
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 0)
self.assertListEqual(dense_vi.losses, loss_keys)
_ = dense_vi(inputs)
# Yes keys.
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(dense_vi.losses, loss_keys)
def testKLPenaltyBoth(self):
def _make_normal(dtype, *args): # pylint: disable=unused-argument
return normal_lib.Normal(
loc=dtype.as_numpy_dtype(0.), scale=dtype.as_numpy_dtype(1.))
with self.test_session():
dense_vi = prob_layers_lib.DenseVariational(
units=2,
bias_posterior_fn=prob_layers_lib.default_mean_field_normal_fn(),
bias_prior_fn=_make_normal)
inputs = random_ops.random_uniform([2, 3], seed=1)
# No keys.
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 0)
self.assertListEqual(dense_vi.losses, loss_keys)
_ = dense_vi(inputs)
# Yes keys.
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 2)
self.assertListEqual(dense_vi.losses, loss_keys)
def testVariationalNonLocal(self):
batch_size, in_size, out_size = 2, 3, 4
with self.test_session() as sess:
seed = Counter()
inputs = random_ops.random_uniform([batch_size, in_size], seed=seed())
kernel_size = [in_size, out_size]
kernel_posterior = MockDistribution(
result_log_prob=random_ops.random_uniform(kernel_size, seed=seed()),
result_sample=random_ops.random_uniform(kernel_size, seed=seed()))
kernel_prior = MockDistribution(
result_log_prob=random_ops.random_uniform(kernel_size, seed=seed()),
result_sample=random_ops.random_uniform(kernel_size, seed=seed()))
kernel_divergence = MockKLDivergence(
result=random_ops.random_uniform(kernel_size, seed=seed()))
bias_size = [out_size]
bias_posterior = MockDistribution(
result_log_prob=random_ops.random_uniform(bias_size, seed=seed()),
result_sample=random_ops.random_uniform(bias_size, seed=seed()))
bias_prior = MockDistribution(
result_log_prob=random_ops.random_uniform(bias_size, seed=seed()),
result_sample=random_ops.random_uniform(bias_size, seed=seed()))
bias_divergence = MockKLDivergence(
result=random_ops.random_uniform(bias_size, seed=seed()))
expected_outputs = (
math_ops.matmul(inputs, kernel_posterior.result_sample) +
bias_posterior.result_sample)
dense_vi = prob_layers_lib.DenseVariational(
units=2,
kernel_use_local_reparameterization=False,
kernel_posterior_fn=lambda *args: kernel_posterior,
kernel_posterior_tensor_fn=lambda d: d.sample(seed=42),
kernel_prior_fn=lambda *args: kernel_prior,
kernel_divergence_fn=kernel_divergence,
bias_posterior_fn=lambda *args: bias_posterior,
bias_posterior_tensor_fn=lambda d: d.sample(seed=43),
bias_prior_fn=lambda *args: bias_prior,
bias_divergence_fn=bias_divergence)
outputs = dense_vi(inputs)
kl_penalty = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
[
expected_outputs_, actual_outputs_,
expected_kernel_, actual_kernel_,
expected_kernel_divergence_, actual_kernel_divergence_,
expected_bias_, actual_bias_,
expected_bias_divergence_, actual_bias_divergence_,
] = sess.run([
expected_outputs, outputs,
kernel_posterior.result_sample, dense_vi.kernel.posterior_tensor,
kernel_divergence.result, kl_penalty[0],
bias_posterior.result_sample, dense_vi.bias.posterior_tensor,
bias_divergence.result, kl_penalty[1],
])
self.assertAllClose(
expected_kernel_, actual_kernel_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_bias_, actual_bias_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_outputs_, actual_outputs_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_kernel_divergence_, actual_kernel_divergence_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_bias_divergence_, actual_bias_divergence_,
rtol=1e-6, atol=0.)
self.assertAllEqual(
[[kernel_posterior, kernel_prior, kernel_posterior.result_sample]],
kernel_divergence.args)
self.assertAllEqual(
[[bias_posterior, bias_prior, bias_posterior.result_sample]],
bias_divergence.args)
def testVariationalLocal(self):
batch_size, in_size, out_size = 2, 3, 4
with self.test_session() as sess:
seed = Counter()
inputs = random_ops.random_uniform([batch_size, in_size], seed=seed())
kernel_size = [in_size, out_size]
kernel_posterior = MockDistribution(
loc=random_ops.random_uniform(kernel_size, seed=seed()),
scale=random_ops.random_uniform(kernel_size, seed=seed()),
result_log_prob=random_ops.random_uniform(kernel_size, seed=seed()),
result_sample=random_ops.random_uniform(kernel_size, seed=seed()))
kernel_prior = MockDistribution(
result_log_prob=random_ops.random_uniform(kernel_size, seed=seed()),
result_sample=random_ops.random_uniform(kernel_size, seed=seed()))
kernel_divergence = MockKLDivergence(
result=random_ops.random_uniform(kernel_size, seed=seed()))
bias_size = [out_size]
bias_posterior = MockDistribution(
result_log_prob=random_ops.random_uniform(bias_size, seed=seed()),
result_sample=random_ops.random_uniform(bias_size, seed=seed()))
bias_prior = MockDistribution(
result_log_prob=random_ops.random_uniform(bias_size, seed=seed()),
result_sample=random_ops.random_uniform(bias_size, seed=seed()))
bias_divergence = MockKLDivergence(
result=random_ops.random_uniform(bias_size, seed=seed()))
expected_kernel_posterior_affine = normal_lib.Normal(
loc=math_ops.matmul(inputs, kernel_posterior.result_loc),
scale=math_ops.matmul(
inputs**2., kernel_posterior.result_scale**2)**0.5)
expected_kernel_posterior_affine_tensor = (
expected_kernel_posterior_affine.sample(seed=42))
expected_outputs = (expected_kernel_posterior_affine_tensor +
bias_posterior.result_sample)
dense_vi = prob_layers_lib.DenseVariational(
units=2,
kernel_use_local_reparameterization=True,
kernel_posterior_fn=lambda *args: kernel_posterior,
kernel_posterior_tensor_fn=lambda d: d.sample(seed=42),
kernel_prior_fn=lambda *args: kernel_prior,
kernel_divergence_fn=kernel_divergence,
bias_posterior_fn=lambda *args: bias_posterior,
bias_posterior_tensor_fn=lambda d: d.sample(seed=43),
bias_prior_fn=lambda *args: bias_prior,
bias_divergence_fn=bias_divergence)
outputs = dense_vi(inputs)
kl_penalty = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
[
expected_outputs_, actual_outputs_,
expected_kernel_divergence_, actual_kernel_divergence_,
expected_bias_, actual_bias_,
expected_bias_divergence_, actual_bias_divergence_,
] = sess.run([
expected_outputs, outputs,
kernel_divergence.result, kl_penalty[0],
bias_posterior.result_sample, dense_vi.bias.posterior_tensor,
bias_divergence.result, kl_penalty[1],
])
self.assertAllClose(
expected_bias_, actual_bias_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_outputs_, actual_outputs_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_kernel_divergence_, actual_kernel_divergence_,
rtol=1e-6, atol=0.)
self.assertAllClose(
expected_bias_divergence_, actual_bias_divergence_,
rtol=1e-6, atol=0.)
self.assertAllEqual(
[[kernel_posterior, kernel_prior, None]],
kernel_divergence.args)
self.assertAllEqual(
[[bias_posterior, bias_prior, bias_posterior.result_sample]],
bias_divergence.args)
if __name__ == "__main__":
test.main()
|
apache-2.0
|
plq/spyne
|
spyne/model/primitive/__init__.py
|
2
|
7308
|
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
NATIVE_MAP = {}
string_encoding = 'UTF-8' # ???
from spyne.model.primitive._base import Any
from spyne.model.primitive._base import AnyDict
from spyne.model.primitive._base import AnyHtml
from spyne.model.primitive._base import AnyXml
from spyne.model.primitive._base import Boolean
from spyne.model.primitive.string import Unicode
from spyne.model.primitive.string import String
from spyne.model.primitive.string import AnyUri
from spyne.model.primitive.string import Uuid
from spyne.model.primitive.string import ImageUri
from spyne.model.primitive.string import Ltree
from spyne.model.primitive.string import MimeType
from spyne.model.primitive.string import MimeTypeStrict
from spyne.model.primitive.string import MediaType
from spyne.model.primitive.string import MediaTypeStrict
from spyne.model.primitive.xml import ID
from spyne.model.primitive.xml import Token
from spyne.model.primitive.xml import NMToken
from spyne.model.primitive.xml import Name
from spyne.model.primitive.xml import NCName
from spyne.model.primitive.xml import QName
from spyne.model.primitive.xml import Language
from spyne.model.primitive.xml import NormalizedString
from spyne.model.primitive.spatial import Point
from spyne.model.primitive.spatial import Line
from spyne.model.primitive.spatial import LineString
from spyne.model.primitive.spatial import Polygon
from spyne.model.primitive.spatial import MultiPoint
from spyne.model.primitive.spatial import MultiLine
from spyne.model.primitive.spatial import MultiLineString
from spyne.model.primitive.spatial import MultiPolygon
# Date/Time types
from spyne.model.primitive.datetime import Date
from spyne.model.primitive.datetime import DateTime
from spyne.model.primitive.datetime import Duration
from spyne.model.primitive.datetime import Time
# Numbers
from spyne.model.primitive.number import Decimal
from spyne.model.primitive.number import Double
from spyne.model.primitive.number import Float
from spyne.model.primitive.number import Integer8
from spyne.model.primitive.number import Byte
from spyne.model.primitive.number import Integer16
from spyne.model.primitive.number import Short
from spyne.model.primitive.number import Integer32
from spyne.model.primitive.number import Int
from spyne.model.primitive.number import Integer64
from spyne.model.primitive.number import Long
from spyne.model.primitive.number import Integer
from spyne.model.primitive.number import NumberLimitsWarning
from spyne.model.primitive.number import UnsignedInteger8
from spyne.model.primitive.number import UnsignedByte
from spyne.model.primitive.number import UnsignedInteger16
from spyne.model.primitive.number import UnsignedShort
from spyne.model.primitive.number import UnsignedInteger32
from spyne.model.primitive.number import UnsignedInt
from spyne.model.primitive.number import UnsignedInteger64
from spyne.model.primitive.number import UnsignedLong
from spyne.model.primitive.number import NonNegativeInteger # Xml Schema calls it so
from spyne.model.primitive.number import UnsignedInteger
from spyne.model.primitive.network import MacAddress
from spyne.model.primitive.network import IpAddress
from spyne.model.primitive.network import Ipv4Address
from spyne.model.primitive.network import Ipv6Address
# This class is DEPRECATED. Use the spyne.model.Mandatory like this:
# >>> from spyne.model import Mandatory as M, Unicode
# >>> MandatoryEmail = M(Unicode(pattern='[^@]+@[^@]+'))
class Mandatory:
Unicode = Unicode(type_name="MandatoryString", min_occurs=1, nillable=False, min_len=1)
String = String(type_name="MandatoryString", min_occurs=1, nillable=False, min_len=1)
AnyXml = AnyXml(type_name="MandatoryXml", min_occurs=1, nillable=False)
AnyDict = AnyDict(type_name="MandatoryDict", min_occurs=1, nillable=False)
AnyUri = AnyUri(type_name="MandatoryUri", min_occurs=1, nillable=False, min_len=1)
ImageUri = ImageUri(type_name="MandatoryImageUri", min_occurs=1, nillable=False, min_len=1)
Boolean = Boolean(type_name="MandatoryBoolean", min_occurs=1, nillable=False)
Date = Date(type_name="MandatoryDate", min_occurs=1, nillable=False)
Time = Time(type_name="MandatoryTime", min_occurs=1, nillable=False)
DateTime = DateTime(type_name="MandatoryDateTime", min_occurs=1, nillable=False)
Duration = Duration(type_name="MandatoryDuration", min_occurs=1, nillable=False)
Decimal = Decimal(type_name="MandatoryDecimal", min_occurs=1, nillable=False)
Double = Double(type_name="MandatoryDouble", min_occurs=1, nillable=False)
Float = Float(type_name="MandatoryFloat", min_occurs=1, nillable=False)
Integer = Integer(type_name="MandatoryInteger", min_occurs=1, nillable=False)
Integer64 = Integer64(type_name="MandatoryLong", min_occurs=1, nillable=False)
Integer32 = Integer32(type_name="MandatoryInt", min_occurs=1, nillable=False)
Integer16 = Integer16(type_name="MandatoryShort", min_occurs=1, nillable=False)
Integer8 = Integer8(type_name="MandatoryByte", min_occurs=1, nillable=False)
Long = Integer64
Int = Integer32
Short = Integer16
Byte = Integer8
UnsignedInteger = UnsignedInteger(type_name="MandatoryUnsignedInteger", min_occurs=1, nillable=False)
UnsignedInteger64 = UnsignedInteger64(type_name="MandatoryUnsignedLong", min_occurs=1, nillable=False)
UnsignedInteger32 = UnsignedInteger32(type_name="MandatoryUnsignedInt", min_occurs=1, nillable=False)
UnsignedInteger16 = UnsignedInteger16(type_name="MandatoryUnsignedShort", min_occurs=1, nillable=False)
UnsignedInteger8 = UnsignedInteger8(type_name="MandatoryUnsignedByte", min_occurs=1, nillable=False)
UnsignedLong = UnsignedInteger64
UnsignedInt = UnsignedInteger32
UnsignedShort = UnsignedInteger16
UnsignedByte = UnsignedInteger8
Uuid = Uuid(type_name="MandatoryUuid", min_len=1, min_occurs=1, nillable=False)
Point = Point(type_name="Point", min_len=1, min_occurs=1, nillable=False)
Line = Line(type_name="LineString", min_len=1, min_occurs=1, nillable=False)
LineString = Line
Polygon = Polygon(type_name="Polygon", min_len=1, min_occurs=1, nillable=False)
MultiPoint = MultiPoint(type_name="MandatoryMultiPoint", min_len=1, min_occurs=1, nillable=False)
MultiLine = MultiLine(type_name="MandatoryMultiLineString", min_len=1, min_occurs=1, nillable=False)
MultiLineString = MultiLine
MultiPolygon = MultiPolygon(type_name="MandatoryMultiPolygon", min_len=1, min_occurs=1, nillable=False)
assert Mandatory.Long == Mandatory.Integer64
|
lgpl-2.1
|
DoubleCiti/daimaduan.com
|
daimaduan/migrations/20160318173601.py
|
1
|
1459
|
# coding: utf-8
import hashlib
import time
from mongodb_migrations.base import BaseMigration
class Migration(BaseMigration):
def upgrade(self):
"""
Add default bookmark to every user
:return:
"""
user_ids = [str(i['user']) for i in self.db.bookmark.find()]
users = self.db.user.find()
for user in users:
if str(user['_id']) not in user_ids:
self.db.bookmark.save({'user': user['_id'],
'title': u'%s 的收藏夹' % user['username'],
'description': '',
'hash_id': self.create_hash_id(user['salt'], 'bookmark'),
'created_at': user['created_at'],
'updated_at': user['created_at'],
'pastes': [],
'is_private': False,
'is_default': True})
def create_hash_id(self, salt, string):
def generate_hash_id():
return hashlib.sha1('%s%s%s' % (salt, string, str(time.time()))).hexdigest()[:11]
hash_id = generate_hash_id()
while(self.db.bookmark.find_one({'hash_id': hash_id}) is not None):
hash_id = generate_hash_id()
return hash_id
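    # Illustrative note (not part of the original migration): create_hash_id keeps
    # regenerating an 11-character sha1 prefix until it no longer collides with an
    # existing bookmark's hash_id, e.g. (the value shown is a placeholder):
    # >>> self.create_hash_id(user['salt'], 'bookmark')
    # 'a94a8fe5ccb'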
def downgrade(self):
print "I'm in downgrade - migration2"
|
bsd-3-clause
|
gauravbose/digital-menu
|
django/core/management/commands/dbshell.py
|
467
|
1192
|
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
class Command(BaseCommand):
help = ("Runs the command-line client for specified database, or the "
"default database if none is provided.")
requires_system_checks = False
def add_arguments(self, parser):
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a database onto which to '
'open a shell. Defaults to the "default" database.')
def handle(self, **options):
connection = connections[options.get('database')]
try:
connection.client.runshell()
except OSError:
# Note that we're assuming OSError means that the client program
# isn't installed. There's a possibility OSError would be raised
# for some other reason, in which case this error message would be
# inaccurate. Still, this message catches the common case.
raise CommandError('You appear not to have the %r program installed or on your path.' %
connection.client.executable_name)
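# Illustrative usage (not part of the original command; "replica" is a hypothetical
# database alias from settings.DATABASES):
#   python manage.py dbshell
#   python manage.py dbshell --database=replica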
|
bsd-3-clause
|
kun--hust/libcloud_with_cn
|
libcloud/test/compute/test_ssh_client.py
|
17
|
11868
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import with_statement
import os
import sys
import tempfile
from libcloud import _init_once
from libcloud.test import LibcloudTestCase
from libcloud.test import unittest
from libcloud.compute.ssh import ParamikoSSHClient
from libcloud.compute.ssh import ShellOutSSHClient
from libcloud.compute.ssh import have_paramiko
from libcloud.utils.py3 import StringIO
from mock import patch, Mock
if not have_paramiko:
ParamikoSSHClient = None # NOQA
else:
import paramiko
class ParamikoSSHClientTests(LibcloudTestCase):
@patch('paramiko.SSHClient', Mock)
def setUp(self):
"""
Creates the object patching the actual connection.
"""
conn_params = {'hostname': 'dummy.host.org',
'port': 8822,
'username': 'ubuntu',
'key': '~/.ssh/ubuntu_ssh',
'timeout': '600'}
_, self.tmp_file = tempfile.mkstemp()
os.environ['LIBCLOUD_DEBUG'] = self.tmp_file
_init_once()
self.ssh_cli = ParamikoSSHClient(**conn_params)
@patch('paramiko.SSHClient', Mock)
def test_create_with_password(self):
conn_params = {'hostname': 'dummy.host.org',
'username': 'ubuntu',
'password': 'ubuntu'}
mock = ParamikoSSHClient(**conn_params)
mock.connect()
expected_conn = {'username': 'ubuntu',
'password': 'ubuntu',
'allow_agent': False,
'hostname': 'dummy.host.org',
'look_for_keys': False,
'port': 22}
mock.client.connect.assert_called_once_with(**expected_conn)
self.assertLogMsg('Connecting to server')
@patch('paramiko.SSHClient', Mock)
def test_deprecated_key_argument(self):
conn_params = {'hostname': 'dummy.host.org',
'username': 'ubuntu',
'key': 'id_rsa'}
mock = ParamikoSSHClient(**conn_params)
mock.connect()
expected_conn = {'username': 'ubuntu',
'allow_agent': False,
'hostname': 'dummy.host.org',
'look_for_keys': False,
'key_filename': 'id_rsa',
'port': 22}
mock.client.connect.assert_called_once_with(**expected_conn)
self.assertLogMsg('Connecting to server')
def test_key_files_and_key_material_arguments_are_mutual_exclusive(self):
conn_params = {'hostname': 'dummy.host.org',
'username': 'ubuntu',
'key_files': 'id_rsa',
'key_material': 'key'}
expected_msg = ('key_files and key_material arguments are mutually '
'exclusive')
self.assertRaisesRegexp(ValueError, expected_msg,
ParamikoSSHClient, **conn_params)
@patch('paramiko.SSHClient', Mock)
def test_key_material_argument(self):
path = os.path.join(os.path.dirname(__file__),
'fixtures', 'misc', 'dummy_rsa')
with open(path, 'r') as fp:
private_key = fp.read()
conn_params = {'hostname': 'dummy.host.org',
'username': 'ubuntu',
'key_material': private_key}
mock = ParamikoSSHClient(**conn_params)
mock.connect()
pkey = paramiko.RSAKey.from_private_key(StringIO(private_key))
expected_conn = {'username': 'ubuntu',
'allow_agent': False,
'hostname': 'dummy.host.org',
'look_for_keys': False,
'pkey': pkey,
'port': 22}
mock.client.connect.assert_called_once_with(**expected_conn)
self.assertLogMsg('Connecting to server')
@patch('paramiko.SSHClient', Mock)
def test_key_material_argument_invalid_key(self):
conn_params = {'hostname': 'dummy.host.org',
'username': 'ubuntu',
'key_material': 'id_rsa'}
mock = ParamikoSSHClient(**conn_params)
expected_msg = 'Invalid or unsupported key type'
self.assertRaisesRegexp(paramiko.ssh_exception.SSHException,
expected_msg, mock.connect)
@patch('paramiko.SSHClient', Mock)
def test_create_with_key(self):
conn_params = {'hostname': 'dummy.host.org',
'username': 'ubuntu',
'key_files': 'id_rsa'}
mock = ParamikoSSHClient(**conn_params)
mock.connect()
expected_conn = {'username': 'ubuntu',
'allow_agent': False,
'hostname': 'dummy.host.org',
'look_for_keys': False,
'key_filename': 'id_rsa',
'port': 22}
mock.client.connect.assert_called_once_with(**expected_conn)
self.assertLogMsg('Connecting to server')
@patch('paramiko.SSHClient', Mock)
def test_create_with_password_and_key(self):
conn_params = {'hostname': 'dummy.host.org',
'username': 'ubuntu',
'password': 'ubuntu',
'key': 'id_rsa'}
mock = ParamikoSSHClient(**conn_params)
mock.connect()
expected_conn = {'username': 'ubuntu',
'password': 'ubuntu',
'allow_agent': False,
'hostname': 'dummy.host.org',
'look_for_keys': False,
'key_filename': 'id_rsa',
'port': 22}
mock.client.connect.assert_called_once_with(**expected_conn)
self.assertLogMsg('Connecting to server')
@patch('paramiko.SSHClient', Mock)
def test_create_without_credentials(self):
"""
Initialize object with no credentials.
Just to have better coverage, initialize the object
without 'password' or 'key'.
"""
conn_params = {'hostname': 'dummy.host.org',
'username': 'ubuntu'}
mock = ParamikoSSHClient(**conn_params)
mock.connect()
expected_conn = {'username': 'ubuntu',
'hostname': 'dummy.host.org',
'allow_agent': True,
'look_for_keys': True,
'port': 22}
mock.client.connect.assert_called_once_with(**expected_conn)
def test_basic_usage_absolute_path(self):
"""
Basic execution.
"""
mock = self.ssh_cli
# script to execute
sd = "/root/random_script.sh"
# Connect behavior
mock.connect()
mock_cli = mock.client # The actual mocked object: SSHClient
expected_conn = {'username': 'ubuntu',
'key_filename': '~/.ssh/ubuntu_ssh',
'allow_agent': False,
'hostname': 'dummy.host.org',
'look_for_keys': False,
'timeout': '600',
'port': 8822}
mock_cli.connect.assert_called_once_with(**expected_conn)
mock.put(sd)
# Make assertions over 'put' method
mock_cli.open_sftp().chdir.assert_called_with('root')
mock_cli.open_sftp().file.assert_called_once_with('random_script.sh',
mode='w')
mock.run(sd)
# Make assertions over 'run' method
mock_cli.get_transport().open_session().exec_command \
.assert_called_once_with(sd)
self.assertLogMsg('Executing command (cmd=/root/random_script.sh)')
self.assertLogMsg('Command finished')
mock.close()
def test_delete_script(self):
"""
Provide a basic test with 'delete' action.
"""
mock = self.ssh_cli
# script to execute
sd = '/root/random_script.sh'
mock.connect()
mock.delete(sd)
# Make assertions over the 'delete' method
mock.client.open_sftp().unlink.assert_called_with(sd)
self.assertLogMsg('Deleting file')
mock.close()
self.assertLogMsg('Closing server connection')
def assertLogMsg(self, expected_msg):
with open(self.tmp_file, 'r') as fp:
content = fp.read()
self.assertTrue(content.find(expected_msg) != -1)
if not ParamikoSSHClient:
class ParamikoSSHClientTests(LibcloudTestCase): # NOQA
pass
class ShellOutSSHClientTests(LibcloudTestCase):
def test_password_auth_not_supported(self):
try:
ShellOutSSHClient(hostname='localhost', username='foo',
password='bar')
except ValueError:
e = sys.exc_info()[1]
msg = str(e)
self.assertTrue('ShellOutSSHClient only supports key auth' in msg)
else:
self.fail('Exception was not thrown')
def test_ssh_executable_not_available(self):
class MockChild(object):
returncode = 127
def communicate(*args, **kwargs):
pass
def mock_popen(*args, **kwargs):
return MockChild()
with patch('subprocess.Popen', mock_popen):
try:
ShellOutSSHClient(hostname='localhost', username='foo')
except ValueError:
e = sys.exc_info()[1]
msg = str(e)
self.assertTrue('ssh client is not available' in msg)
else:
self.fail('Exception was not thrown')
def test_connect_success(self):
client = ShellOutSSHClient(hostname='localhost', username='root')
self.assertTrue(client.connect())
def test_close_success(self):
client = ShellOutSSHClient(hostname='localhost', username='root')
self.assertTrue(client.close())
def test_get_base_ssh_command(self):
client1 = ShellOutSSHClient(hostname='localhost', username='root')
client2 = ShellOutSSHClient(hostname='localhost', username='root',
key='/home/my.key')
client3 = ShellOutSSHClient(hostname='localhost', username='root',
key='/home/my.key', timeout=5)
cmd1 = client1._get_base_ssh_command()
cmd2 = client2._get_base_ssh_command()
cmd3 = client3._get_base_ssh_command()
self.assertEqual(cmd1, ['ssh', 'root@localhost'])
self.assertEqual(cmd2, ['ssh', '-i', '/home/my.key',
'root@localhost'])
self.assertEqual(cmd3, ['ssh', '-i', '/home/my.key',
'-oConnectTimeout=5', 'root@localhost'])
if __name__ == '__main__':
sys.exit(unittest.main())
|
apache-2.0
|
zverevalexei/trex-http-proxy
|
trex_client/external_libs/pyzmq-14.5.0/python3/cel59/32bit/zmq/eventloop/minitornado/platform/auto.py
|
50
|
1424
|
#!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of platform-specific functionality.
For each function or class described in `tornado.platform.interface`,
the appropriate platform-specific implementation exists in this module.
Most code that needs access to this functionality should do e.g.::
from tornado.platform.auto import set_close_exec
"""
from __future__ import absolute_import, division, print_function, with_statement
import os
if os.name == 'nt':
from .common import Waker
from .windows import set_close_exec
else:
from .posix import set_close_exec, Waker
try:
# monotime monkey-patches the time module to have a monotonic function
# in versions of python before 3.3.
import monotime
except ImportError:
pass
try:
from time import monotonic as monotonic_time
except ImportError:
monotonic_time = None
|
mit
|
BIMSBbioinfo/bioconda-recipes
|
recipes/biopet-bamstats/biopet-bamstats.py
|
72
|
3369
|
#!/usr/bin/env python
#
# Wrapper script for starting the biopet-bamstats JAR package
#
# This script is written for use with the Conda package manager and is copied
# from the peptide-shaker wrapper. Only the parameters are changed.
# (https://github.com/bioconda/bioconda-recipes/blob/master/recipes/peptide-shaker/peptide-shaker.py)
#
# This file was automatically generated by the sbt-bioconda plugin.
import os
import subprocess
import sys
import shutil
from os import access
from os import getenv
from os import X_OK
jar_file = 'BamStats-assembly-1.0.1.jar'
default_jvm_mem_opts = []
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
"""Return the symlink-resolved, canonicalized directory-portion of path."""
return os.path.dirname(os.path.realpath(path))
def java_executable():
"""Return the executable name of the Java interpreter."""
java_home = getenv('JAVA_HOME')
java_bin = os.path.join('bin', 'java')
if java_home and access(os.path.join(java_home, java_bin), X_OK):
return os.path.join(java_home, java_bin)
else:
return 'java'
def jvm_opts(argv):
"""Construct list of Java arguments based on our argument list.
The argument list passed in argv must not include the script name.
The return value is a 4-tuple of the form:
(memory_options, prop_options, passthrough_options, exec_dir)
"""
mem_opts = []
prop_opts = []
pass_args = []
exec_dir = None
for arg in argv:
if arg.startswith('-D'):
prop_opts.append(arg)
elif arg.startswith('-XX'):
prop_opts.append(arg)
elif arg.startswith('-Xm'):
mem_opts.append(arg)
elif arg.startswith('--exec_dir='):
exec_dir = arg.split('=')[1].strip('"').strip("'")
if not os.path.exists(exec_dir):
shutil.copytree(real_dirname(sys.argv[0]), exec_dir, symlinks=False, ignore=None)
else:
pass_args.append(arg)
# In the original shell script the test coded below read:
# if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
# To reproduce the behaviour of the above shell code fragment
# it is important to explicitly check for equality with None
# in the second condition, so a null envar value counts as True!
if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
mem_opts = default_jvm_mem_opts
return (mem_opts, prop_opts, pass_args, exec_dir)
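# A minimal, hypothetical illustration of how jvm_opts() classifies arguments;
# the argument values below are made up and not part of the original wrapper:
#
#   mem, props, rest, exec_dir = jvm_opts(
#       ['-Xmx2g', '-Dsample.rate=0.1', 'stats', '-I', 'in.bam'])
#   # mem      -> ['-Xmx2g']                 (starts with '-Xm')
#   # props    -> ['-Dsample.rate=0.1']      (starts with '-D' or '-XX')
#   # rest     -> ['stats', '-I', 'in.bam']  (passed through to the JAR)
#   # exec_dir -> None                       (no --exec_dir= argument given)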
def main():
"""
PeptideShaker updates files relative to the path of the jar file.
In a multiuser setting, the option --exec_dir="exec_dir"
can be used as the location for the peptide-shaker distribution.
If the exec_dir does not exist,
we copy the jar file, lib, and resources to the exec_dir directory.
"""
java = java_executable()
(mem_opts, prop_opts, pass_args, exec_dir) = jvm_opts(sys.argv[1:])
jar_dir = exec_dir if exec_dir else real_dirname(sys.argv[0])
if pass_args != [] and pass_args[0].startswith('eu'):
jar_arg = '-cp'
else:
jar_arg = '-jar'
jar_path = os.path.join(jar_dir, jar_file)
java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
|
mit
|
abhitopia/tensorflow
|
tensorflow/tools/compatibility/tf_upgrade_test.py
|
48
|
6045
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf upgrader."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import six
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test as test_lib
from tensorflow.tools.compatibility import tf_upgrade
class TestUpgrade(test_util.TensorFlowTestCase):
"""Test various APIs that have been changed in 1.0.
We also test whether a converted file is executable. test_file_v0_11.py
aims to exhaustively test that API changes are convertible and actually
work when run with current TensorFlow.
"""
def _upgrade(self, old_file_text):
in_file = six.StringIO(old_file_text)
out_file = six.StringIO()
upgrader = tf_upgrade.TensorFlowCodeUpgrader()
count, report, errors = (
upgrader.process_opened_file("test.py", in_file,
"test_out.py", out_file))
return count, report, errors, out_file.getvalue()
def testParseError(self):
_, report, unused_errors, unused_new_text = self._upgrade(
"import tensorflow as tf\na + \n")
self.assertTrue(report.find("Failed to parse") != -1)
def testReport(self):
text = "tf.mul(a, b)\n"
_, report, unused_errors, unused_new_text = self._upgrade(text)
# This is not a complete test, but it is a sanity test that a report
# is generating information.
self.assertTrue(report.find("Renamed function `tf.mul` to `tf.multiply`"))
def testRename(self):
text = "tf.mul(a, tf.sub(b, c))\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.multiply(a, tf.subtract(b, c))\n")
def testRenamePack(self):
text = "tf.pack(a)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.stack(a)\n")
text = "tf.unpack(a)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.unstack(a)\n")
def testReorder(self):
text = "tf.concat(a, b)\ntf.split(a, b, c)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.concat(axis=a, values=b)\n"
"tf.split(axis=a, num_or_size_splits=b, value=c)\n")
def testConcatReorderWithKeywordArgs(self):
text = "tf.concat(concat_dim=a, values=b)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.concat(axis=a, values=b)\n")
text = "tf.concat(values=b, concat_dim=a)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.concat(values=b, axis=a)\n")
text = "tf.concat(a, values=b)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.concat(axis=a, values=b)\n")
def testConcatReorderNested(self):
text = "tf.concat(a, tf.concat(c, d))\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(
new_text, "tf.concat(axis=a, values=tf.concat(axis=c, values=d))\n")
def testInitializers(self):
text = ("tf.zeros_initializer;tf.zeros_initializer ()\n"
"tf.ones_initializer;tf.ones_initializer ()\n")
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(
new_text, "tf.zeros_initializer();tf.zeros_initializer ()\n"
"tf.ones_initializer();tf.ones_initializer ()\n")
def testKeyword(self):
text = "tf.reduce_any(a, reduction_indices=[1, 2])\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.reduce_any(a, axis=[1, 2])\n")
def testComplexExpression(self):
text = "(foo + bar)[a].word()"
_ = self._upgrade(text)
def testReverse(self):
text = "tf.reverse(a, b)\n"
_, unused_report, errors, new_text = self._upgrade(text)
self.assertEqual(new_text, text)
self.assertEqual(errors, ["test.py:1: tf.reverse requires manual check."])
def testListComprehension(self):
def _test(input, output):
_, unused_report, errors, new_text = self._upgrade(input)
self.assertEqual(new_text, output)
_test("tf.concat(0, \t[x for x in y])\n",
"tf.concat(axis=0, \tvalues=[x for x in y])\n")
_test("tf.concat(0,[x for x in y])\n",
"tf.concat(axis=0,values=[x for x in y])\n")
_test("tf.concat(0,[\nx for x in y])\n",
"tf.concat(axis=0,values=[\nx for x in y])\n")
_test("tf.concat(0,[\n \tx for x in y])\n",
"tf.concat(axis=0,values=[\n \tx for x in y])\n")
# TODO(aselle): Explicitly not testing command line interface and process_tree
# for now, since this is a one off utility.
class TestUpgradeFiles(test_util.TensorFlowTestCase):
def testInplace(self):
"""Check to make sure we don't have a file system race."""
temp_file = tempfile.NamedTemporaryFile("w", delete=False)
original = "tf.mul(a, b)\n"
upgraded = "tf.multiply(a, b)\n"
temp_file.write(original)
temp_file.close()
upgrader = tf_upgrade.TensorFlowCodeUpgrader()
upgrader.process_file(temp_file.name, temp_file.name)
self.assertAllEqual(open(temp_file.name).read(), upgraded)
os.unlink(temp_file.name)
if __name__ == "__main__":
test_lib.main()
|
apache-2.0
|
GenericStudent/home-assistant
|
homeassistant/components/image/__init__.py
|
10
|
6882
|
"""The Picture integration."""
import asyncio
import logging
import pathlib
import secrets
import shutil
import typing
from PIL import Image, ImageOps, UnidentifiedImageError
from aiohttp import hdrs, web
from aiohttp.web_request import FileField
import voluptuous as vol
from homeassistant.components.http.static import CACHE_HEADERS
from homeassistant.components.http.view import HomeAssistantView
from homeassistant.const import CONF_ID
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import collection
from homeassistant.helpers.storage import Store
import homeassistant.util.dt as dt_util
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
VALID_SIZES = {256, 512}
MAX_SIZE = 1024 * 1024 * 10
CREATE_FIELDS = {
vol.Required("file"): FileField,
}
UPDATE_FIELDS = {
vol.Optional("name"): vol.All(str, vol.Length(min=1)),
}
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Image integration."""
image_dir = pathlib.Path(hass.config.path(DOMAIN))
hass.data[DOMAIN] = storage_collection = ImageStorageCollection(hass, image_dir)
await storage_collection.async_load()
collection.StorageCollectionWebsocket(
storage_collection,
DOMAIN,
DOMAIN,
CREATE_FIELDS,
UPDATE_FIELDS,
).async_setup(hass, create_create=False)
hass.http.register_view(ImageUploadView)
hass.http.register_view(ImageServeView(image_dir, storage_collection))
return True
class ImageStorageCollection(collection.StorageCollection):
"""Image collection stored in storage."""
CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
def __init__(self, hass: HomeAssistant, image_dir: pathlib.Path) -> None:
"""Initialize media storage collection."""
super().__init__(
Store(hass, STORAGE_VERSION, STORAGE_KEY),
logging.getLogger(f"{__name__}.storage_collection"),
)
self.async_add_listener(self._change_listener)
self.image_dir = image_dir
async def _process_create_data(self, data: typing.Dict) -> typing.Dict:
"""Validate the config is valid."""
data = self.CREATE_SCHEMA(dict(data))
uploaded_file: FileField = data["file"]
if not uploaded_file.content_type.startswith("image/"):
raise vol.Invalid("Only images are allowed")
data[CONF_ID] = secrets.token_hex(16)
data["filesize"] = await self.hass.async_add_executor_job(self._move_data, data)
data["content_type"] = uploaded_file.content_type
data["name"] = uploaded_file.filename
data["uploaded_at"] = dt_util.utcnow().isoformat()
return data
def _move_data(self, data):
"""Move data."""
uploaded_file: FileField = data.pop("file")
# Verify we can read the image
try:
image = Image.open(uploaded_file.file)
except UnidentifiedImageError as err:
raise vol.Invalid("Unable to identify image file") from err
# Reset content
uploaded_file.file.seek(0)
media_folder: pathlib.Path = self.image_dir / data[CONF_ID]
media_folder.mkdir(parents=True)
media_file = media_folder / "original"
# Raises if path is no longer relative to the media dir
media_file.relative_to(media_folder)
_LOGGER.debug("Storing file %s", media_file)
with media_file.open("wb") as target:
shutil.copyfileobj(uploaded_file.file, target)
image.close()
return media_file.stat().st_size
@callback
def _get_suggested_id(self, info: typing.Dict) -> str:
"""Suggest an ID based on the config."""
return info[CONF_ID]
async def _update_data(self, data: dict, update_data: typing.Dict) -> typing.Dict:
"""Return a new updated data object."""
return {**data, **self.UPDATE_SCHEMA(update_data)}
async def _change_listener(self, change_type, item_id, data):
"""Handle change."""
if change_type != collection.CHANGE_REMOVED:
return
await self.hass.async_add_executor_job(shutil.rmtree, self.image_dir / item_id)
class ImageUploadView(HomeAssistantView):
"""View to upload images."""
url = "/api/image/upload"
name = "api:image:upload"
async def post(self, request):
"""Handle upload."""
# Increase max payload
request._client_max_size = MAX_SIZE # pylint: disable=protected-access
data = await request.post()
item = await request.app["hass"].data[DOMAIN].async_create_item(data)
return self.json(item)
class ImageServeView(HomeAssistantView):
"""View to download images."""
url = "/api/image/serve/{image_id}/{filename}"
name = "api:image:serve"
requires_auth = False
def __init__(
self, image_folder: pathlib.Path, image_collection: ImageStorageCollection
):
"""Initialize image serve view."""
self.transform_lock = asyncio.Lock()
self.image_folder = image_folder
self.image_collection = image_collection
async def get(self, request: web.Request, image_id: str, filename: str):
"""Serve image."""
image_size = filename.split("-", 1)[0]
try:
parts = image_size.split("x", 1)
width = int(parts[0])
height = int(parts[1])
except (ValueError, IndexError) as err:
raise web.HTTPBadRequest from err
if not width or width != height or width not in VALID_SIZES:
raise web.HTTPBadRequest
image_info = self.image_collection.data.get(image_id)
if image_info is None:
raise web.HTTPNotFound()
hass = request.app["hass"]
target_file = self.image_folder / image_id / f"{width}x{height}"
if not target_file.is_file():
async with self.transform_lock:
# Another check in case another request already finished it while waiting
if not target_file.is_file():
await hass.async_add_executor_job(
_generate_thumbnail,
self.image_folder / image_id / "original",
image_info["content_type"],
target_file,
(width, height),
)
return web.FileResponse(
target_file,
headers={**CACHE_HEADERS, hdrs.CONTENT_TYPE: image_info["content_type"]},
)
def _generate_thumbnail(original_path, content_type, target_path, target_size):
"""Generate a size."""
image = ImageOps.exif_transpose(Image.open(original_path))
image.thumbnail(target_size)
image.save(target_path, format=content_type.split("/", 1)[1])
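# Illustrative request shape accepted by ImageServeView (the image id below is
# hypothetical):
#
#   GET /api/image/serve/0123456789abcdef0123456789abcdef/512x512-original
#
# The filename prefix must parse as "<width>x<height>" with width == height
# and the value present in VALID_SIZES; anything else raises HTTPBadRequest.
# Thumbnails are generated lazily on first request and cached on disk next to
# the stored "original" file.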
|
apache-2.0
|
fgclaramonte/Odoo-addons
|
pos_order_categories/__openerp__.py
|
1
|
1761
|
# -*- encoding: utf-8 -*-
##############################################################################
# pos_order_categories
# Copyright (c) 2017 Francisco Manuel García Claramonte <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Pedidos de TPV clasificados por categorías de producto',
'version': '8.0.0.1.5',
'category': 'Tools',
'description': """
Consulta de pedidos por sesión de TPV
=====================================
Este módulo añade al modelo de Sesión de TPV la consulta de todos los pedidos,
agrupados por las categorías de los productos vendidos durante toda la sesión.
Publicado bajo licencia AGPL-v3.
Copyright (c) 2017 Francisco Manuel García Claramonte
""",
'author': 'Francisco M. García Claramonte',
'website': 'http://www.garciac.ess',
'depends': [
'point_of_sale',
'product',
],
'data': [
'views/point_of_sale_view.xml',
'security/ir.model.access.csv',
],
"installable": True,
}
|
gpl-3.0
|
40223246/w16b_test
|
static/Brython3.1.3-20150514-095342/Lib/site-packages/pygame/rect.py
|
603
|
13689
|
#!/usr/bin/env python
'''Pygame object for storing rectangular coordinates.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import copy
#import SDL.video
import SDL
class _RectProxy:
'''Proxy for SDL_Rect that can handle negative size.'''
__slots__ = ['x', 'y', 'w', 'h']
def __init__(self, r):
if isinstance(r, SDL.SDL_Rect) or isinstance(r, Rect):
self.x = r.x
self.y = r.y
self.w = r.w
self.h = r.h
else:
self.x = r[0]
self.y = r[1]
self.w = r[2]
self.h = r[3]
def _get_as_parameter_(self):
return SDL.SDL_Rect(self.x, self.y, self.w, self.h)
_as_parameter_ = property(_get_as_parameter_)
class Rect:
__slots__ = ['_r']
def __init__(self, *args):
if len(args) == 1:
arg = args[0]
if isinstance(arg, Rect):
object.__setattr__(self, '_r', copy.copy(arg._r))
return
elif isinstance(arg, SDL.SDL_Rect):
object.__setattr__(self, '_r', copy.copy(arg))
return
elif hasattr(arg, 'rect'):
arg = arg.rect
if callable(arg):
arg = arg()
self.__init__(arg)
return
elif hasattr(arg, '__len__'):
args = arg
else:
raise TypeError('Argument must be rect style object')
if len(args) == 4:
if args[2] < 0 or args[3] < 0:
object.__setattr__(self, '_r', _RectProxy((int(args[0]),
int(args[1]),
int(args[2]),
int(args[3]))))
else:
object.__setattr__(self, '_r', SDL.SDL_Rect(int(args[0]),
int(args[1]),
int(args[2]),
int(args[3])))
elif len(args) == 2:
if args[1][0] < 0 or args[1][1] < 0:
object.__setattr__(self, '_r',
_RectProxy((int(args[0][0]),
int(args[0][1]),
int(args[1][0]),
int(args[1][1]))))
else:
object.__setattr__(self, '_r',
SDL.SDL_Rect(int(args[0][0]),
int(args[0][1]),
int(args[1][0]),
int(args[1][1])))
else:
raise TypeError('Argument must be rect style object')
def __copy__(self):
return Rect(self)
def __repr__(self):
return '<rect(%d, %d, %d, %d)>' % \
(self._r.x, self._r.y, self._r.w, self._r.h)
def __cmp__(self, *other):
other = _rect_from_object(other)
if self._r.x != other._r.x:
return cmp(self._r.x, other._r.x)
if self._r.y != other._r.y:
return cmp(self._r.y, other._r.y)
if self._r.w != other._r.w:
return cmp(self._r.w, other._r.w)
if self._r.h != other._r.h:
return cmp(self._r.h, other._r.h)
return 0
def __nonzero__(self):
return self._r.w != 0 and self._r.h != 0
def __getattr__(self, name):
if name == 'top':
return self._r.y
elif name == 'left':
return self._r.x
elif name == 'bottom':
return self._r.y + self._r.h
elif name == 'right':
return self._r.x + self._r.w
elif name == 'topleft':
return self._r.x, self._r.y
elif name == 'bottomleft':
return self._r.x, self._r.y + self._r.h
elif name == 'topright':
return self._r.x + self._r.w, self._r.y
elif name == 'bottomright':
return self._r.x + self._r.w, self._r.y + self._r.h
elif name == 'midtop':
return self._r.x + self._r.w / 2, self._r.y
elif name == 'midleft':
return self._r.x, self._r.y + self._r.h / 2
elif name == 'midbottom':
return self._r.x + self._r.w / 2, self._r.y + self._r.h
elif name == 'midright':
return self._r.x + self._r.w, self._r.y + self._r.h / 2
elif name == 'center':
return self._r.x + self._r.w / 2, self._r.y + self._r.h / 2
elif name == 'centerx':
return self._r.x + self._r.w / 2
elif name == 'centery':
return self._r.y + self._r.h / 2
elif name == 'size':
return self._r.w, self._r.h
elif name == 'width':
return self._r.w
elif name == 'height':
return self._r.h
else:
raise AttributeError(name)
def __setattr__(self, name, value):
if name == 'top' or name == 'y':
self._r.y = int(value)
elif name == 'left' or name == 'x':
self._r.x = int(value)
elif name == 'bottom':
self._r.y = int(value) - self._r.h
elif name == 'right':
self._r.x = int(value) - self._r.w
elif name == 'topleft':
self._r.x = int(value[0])
self._r.y = int(value[1])
elif name == 'bottomleft':
self._r.x = int(value[0])
self._r.y = int(value[1]) - self._r.h
elif name == 'topright':
self._r.x = int(value[0]) - self._r.w
self._r.y = int(value[1])
elif name == 'bottomright':
self._r.x = int(value[0]) - self._r.w
self._r.y = int(value[1]) - self._r.h
elif name == 'midtop':
self._r.x = int(value[0]) - self._r.w / 2
self._r.y = int(value[1])
elif name == 'midleft':
self._r.x = int(value[0])
self._r.y = int(value[1]) - self._r.h / 2
elif name == 'midbottom':
self._r.x = int(value[0]) - self._r.w / 2
self._r.y = int(value[1]) - self._r.h
elif name == 'midright':
self._r.x = int(value[0]) - self._r.w
self._r.y = int(value[1]) - self._r.h / 2
elif name == 'center':
self._r.x = int(value[0]) - self._r.w / 2
self._r.y = int(value[1]) - self._r.h / 2
elif name == 'centerx':
self._r.x = int(value) - self._r.w / 2
elif name == 'centery':
self._r.y = int(value) - self._r.h / 2
elif name == 'size':
if int(value[0]) < 0 or int(value[1]) < 0:
self._ensure_proxy()
self._r.w, self._r.h = int(value[0]), int(value[1])
elif name == 'width':
if int(value) < 0:
self._ensure_proxy()
self._r.w = int(value)
elif name == 'height':
if int(value) < 0:
self._ensure_proxy()
self._r.h = int(value)
else:
raise AttributeError(name)
def _ensure_proxy(self):
if not isinstance(self._r, _RectProxy):
object.__setattr__(self, '_r', _RectProxy(self._r))
def __len__(self):
return 4
def __getitem__(self, key):
return (self._r.x, self._r.y, self._r.w, self._r.h)[key]
def __setitem__(self, key, value):
r = [self._r.x, self._r.y, self._r.w, self._r.h]
r[key] = value
self._r.x, self._r.y, self._r.w, self._r.h = r
def __coerce__(self, *other):
try:
return self, Rect(*other)
except TypeError:
return None
def move(self, *pos):
x, y = _two_ints_from_args(pos)
return Rect(self._r.x + x, self._r.y + y, self._r.w, self._r.h)
def move_ip(self, *pos):
x, y = _two_ints_from_args(pos)
self._r.x += x
self._r.y += y
def inflate(self, x, y):
return Rect(self._r.x - x / 2, self._r.y - y / 2,
self._r.w + x, self._r.h + y)
def inflate_ip(self, x, y):
self._r.x -= x / 2
self._r.y -= y / 2
self._r.w += x
self._r.h += y
def clamp(self, *other):
r = Rect(self)
r.clamp_ip(*other)
return r
def clamp_ip(self, *other):
other = _rect_from_object(other)._r
if self._r.w >= other.w:
x = other.x + other.w / 2 - self._r.w / 2
elif self._r.x < other.x:
x = other.x
elif self._r.x + self._r.w > other.x + other.w:
x = other.x + other.w - self._r.w
else:
x = self._r.x
if self._r.h >= other.h:
y = other.y + other.h / 2 - self._r.h / 2
elif self._r.y < other.y:
y = other.y
elif self._r.y + self._r.h > other.y + other.h:
y = other.y + other.h - self._r.h
else:
y = self._r.y
self._r.x, self._r.y = x, y
def clip(self, *other):
r = Rect(self)
r.clip_ip(*other)
return r
def clip_ip(self, *other):
other = _rect_from_object(other)._r
x = max(self._r.x, other.x)
w = min(self._r.x + self._r.w, other.x + other.w) - x
y = max(self._r.y, other.y)
h = min(self._r.y + self._r.h, other.y + other.h) - y
if w <= 0 or h <= 0:
self._r.w, self._r.h = 0, 0
else:
self._r.x, self._r.y, self._r.w, self._r.h = x, y, w, h
def union(self, *other):
r = Rect(self)
r.union_ip(*other)
return r
def union_ip(self, *other):
other = _rect_from_object(other)._r
x = min(self._r.x, other.x)
y = min(self._r.y, other.y)
w = max(self._r.x + self._r.w, other.x + other.w) - x
h = max(self._r.y + self._r.h, other.y + other.h) - y
self._r.x, self._r.y, self._r.w, self._r.h = x, y, w, h
def unionall(self, others):
r = Rect(self)
r.unionall_ip(others)
return r
def unionall_ip(self, others):
l = self._r.x
r = self._r.x + self._r.w
t = self._r.y
b = self._r.y + self._r.h
for other in others:
other = _rect_from_object(other)._r
l = min(l, other.x)
r = max(r, other.x + other.w)
t = min(t, other.y)
b = max(b, other.y + other.h)
self._r.x, self._r.y, self._r.w, self._r.h = l, t, r - l, b - t
def fit(self, *other):
r = Rect(self)
r.fit_ip(*other)
return r
def fit_ip(self, *other):
other = _rect_from_object(other)._r
xratio = self._r.w / float(other.w)
yratio = self._r.h / float(other.h)
maxratio = max(xratio, yratio)
self._r.w = int(self._r.w / maxratio)
self._r.h = int(self._r.h / maxratio)
self._r.x = other.x + (other.w - self._r.w) / 2
self._r.y = other.y + (other.h - self._r.h) / 2
def normalize(self):
if self._r.w < 0:
self._r.x += self._r.w
self._r.w = -self._r.w
if self._r.h < 0:
self._r.y += self._r.h
self._r.h = -self._r.h
if isinstance(self._r, _RectProxy):
object.__setattr__(self, '_r', SDL.SDL_Rect(self._r.x,
self._r.y,
self._r.w,
self._r.h))
def contains(self, *other):
other = _rect_from_object(other)._r
return self._r.x <= other.x and \
self._r.y <= other.y and \
self._r.x + self._r.w >= other.x + other.w and \
self._r.y + self._r.h >= other.y + other.h and \
self._r.x + self._r.w > other.x and \
self._r.y + self._r.h > other.y
def collidepoint(self, x, y):
return x >= self._r.x and \
y >= self._r.y and \
x < self._r.x + self._r.w and \
y < self._r.y + self._r.h
def colliderect(self, *other):
return _rect_collide(self._r, _rect_from_object(other)._r)
def collidelist(self, others):
for i in range(len(others)):
if _rect_collide(self._r, _rect_from_object(others[i])._r):
return i
return -1
def collidelistall(self, others):
matches = []
for i in range(len(others)):
if _rect_collide(self._r, _rect_from_object(others[i])._r):
matches.append(i)
return matches
def collidedict(self, d):
for key, other in d.items():
if _rect_collide(self._r, _rect_from_object(other)._r):
return key, other
return None
def collidedictall(self, d):
matches = []
for key, other in d.items():
if _rect_collide(self._r, _rect_from_object(other)._r):
matches.append((key, other))
return matches
def _rect_from_object(obj):
if isinstance(obj, Rect):
return obj
if type(obj) in (tuple, list):
return Rect(*obj)
else:
return Rect(obj)
def _rect_collide(a, b):
return a.x + a.w > b.x and b.x + b.w > a.x and \
a.y + a.h > b.y and b.y + b.h > a.y
def _two_ints_from_args(arg):
if len(arg) == 1:
return _two_ints_from_args(arg[0])
else:
return arg[:2]
|
agpl-3.0
|
omni5cience/django-inlineformfield
|
.tox/py27/lib/python2.7/site-packages/IPython/nbformat/v1/rwbase.py
|
31
|
1479
|
"""Base classes and function for readers and writers.
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from base64 import encodestring, decodestring
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
class NotebookReader(object):
def reads(self, s, **kwargs):
"""Read a notebook from a string."""
raise NotImplementedError("loads must be implemented in a subclass")
def read(self, fp, **kwargs):
"""Read a notebook from a file like object"""
return self.reads(fp.read(), **kwargs)
class NotebookWriter(object):
def writes(self, nb, **kwargs):
"""Write a notebook to a string."""
raise NotImplementedError("loads must be implemented in a subclass")
def write(self, nb, fp, **kwargs):
"""Write a notebook to a file like object"""
return fp.write(self.writes(nb,**kwargs))
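# Hedged sketch of the intended subclassing pattern; the JSONReader name and
# json-based format are illustrative only, not part of this module:
#
#   import json
#
#   class JSONReader(NotebookReader):
#       def reads(self, s, **kwargs):
#           return json.loads(s)
#
# Concrete readers/writers only need to override reads()/writes(); the read()
# and write() helpers then work against file-like objects for free.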
|
mit
|
apprentice3d/Wox
|
PythonHome/Lib/site-packages/chardet/charsetprober.py
|
3127
|
1902
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import re
class CharSetProber:
def __init__(self):
pass
def reset(self):
self._mState = constants.eDetecting
def get_charset_name(self):
return None
def feed(self, aBuf):
pass
def get_state(self):
return self._mState
def get_confidence(self):
return 0.0
def filter_high_bit_only(self, aBuf):
aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
return aBuf
def filter_without_english_letters(self, aBuf):
aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
return aBuf
def filter_with_english_letters(self, aBuf):
# TODO
return aBuf
|
mit
|
kod3r/neon
|
tests/test_initializers.py
|
10
|
2621
|
# ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
'''
Tests for the initializer classes.
'''
import itertools as itt
import numpy as np
from neon import NervanaObject
from neon.initializers.initializer import Constant, Uniform, Gaussian, GlorotUniform
def pytest_generate_tests(metafunc):
if 'args' in metafunc.fixturenames:
fargs = []
dim1 = [1, 5]
dim2 = [2, 10]
fargs = itt.product(dim1, dim2)
metafunc.parametrize('args', fargs)
def test_constant(backend, args):
be = NervanaObject.be
dim1, dim2 = args
shape = (dim1, dim2)
const_arg = 3
Wdev = be.empty(shape)
const_init = Constant(const_arg)
const_init.fill(Wdev)
Whost = Wdev.get()
flat = Whost.flatten()
for elt in flat:
assert elt == const_arg
return
def test_uniform(backend, args):
be = NervanaObject.be
dim1, dim2 = args
shape = (dim1, dim2)
Wdev = be.empty(shape)
uniform_init = Uniform(low=-5, high=15)
uniform_init.fill(Wdev)
Whost = Wdev.get()
flat = Whost.flatten()
for elt in flat:
assert elt <= 15 and elt >= -5
return
def test_gaussian(backend, args):
be = NervanaObject.be
dim1, dim2 = args
shape = (dim1, dim2)
Wdev = be.empty(shape)
gaussian_init = Gaussian(loc=10000, scale=1)
gaussian_init.fill(Wdev)
Whost = Wdev.get()
flat = Whost.flatten()
for elt in flat:
# Not a very robust test...
assert elt >= 0
return
def test_glorot(backend, args):
be = NervanaObject.be
shape_1 = (1, 2)
shape_2 = (1000, 10000)
Wdev_1 = be.empty(shape_1)
Wdev_2 = be.empty(shape_2)
glorot_init = GlorotUniform()
glorot_init.fill(Wdev_1)
glorot_init.fill(Wdev_2)
Whost_1 = Wdev_1.get()
Whost_2 = Wdev_2.get()
mean_1 = np.mean(Whost_1)
mean_2 = np.mean(Whost_2)
assert np.abs(mean_1) > np.abs(mean_2)
return
|
apache-2.0
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/urbansim/gridcell/vacant_SSS_sqft_from_buildings.py
|
2
|
2287
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
from opus_core.misc import clip_to_zero_if_needed
class vacant_SSS_sqft_from_buildings(Variable):
""" The sqft_of_SSS_buildings - sqft_of_SSS_jobs. """
_return_type="float32"
def __init__(self, type):
self.buildings_sqft_variable = "buildings_%s_sqft" % type
self.sqft_of_jobs = "sqft_of_%s_jobs" % type
self.type = type
Variable.__init__(self)
def dependencies(self):
return [my_attribute_label(self.buildings_sqft_variable),
"urbansim.gridcell.%s" % self.sqft_of_jobs]
def compute(self, dataset_pool):
sqft = self.get_dataset().get_attribute(self.buildings_sqft_variable)
return clip_to_zero_if_needed(sqft -
self.get_dataset().get_attribute(self.sqft_of_jobs), 'vacant_%s_sqft_from_buildings' % self.type)
def post_check(self, values, dataset_pool):
global_max = self.get_dataset().get_attribute(self.buildings_sqft_variable).max()
self.do_check("x >= 0 and x <= %s" % global_max, values)
from opus_core.tests import opus_unittest
from opus_core.tests.utils.variable_tester import VariableTester
from numpy import array
class Tests(opus_unittest.OpusTestCase):
def test_my_inputs(self):
sqft_of_commercial_jobs = array([1225, 5000, 7600])
commercial_sqft = array([1995, 10000, 7500])
tester = VariableTester(
__file__,
package_order=['urbansim'],
test_data={
"gridcell":{
"grid_id":array([1,2,3]),
"sqft_of_commercial_jobs":sqft_of_commercial_jobs,
"buildings_commercial_sqft":commercial_sqft
}
}
)
should_be = array([770, 5000, 0])
instance_name = "urbansim.gridcell.vacant_commercial_sqft_from_buildings"
tester.test_is_equal_for_family_variable(self, should_be, instance_name)
if __name__=='__main__':
opus_unittest.main()
|
gpl-2.0
|
sclabs/sitestatus-nonrel
|
django/contrib/auth/decorators.py
|
230
|
2388
|
import urlparse
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
def user_passes_test(test_func, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
path = request.build_absolute_uri()
# If the login url is the same scheme and net location then just
# use the path as the "next" url.
login_scheme, login_netloc = urlparse.urlparse(login_url or
settings.LOGIN_URL)[:2]
current_scheme, current_netloc = urlparse.urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(path, login_url, redirect_field_name)
return _wrapped_view
return decorator
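# Minimal usage sketch (the view function and test callable are hypothetical):
#
#   @user_passes_test(lambda u: u.is_staff, login_url='/staff-login/')
#   def staff_dashboard(request):
#       ...
#
# Users failing the test are redirected to the login URL with the original
# path preserved in the redirect field (by default "next").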
def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log-in page if necessary.
"""
actual_decorator = user_passes_test(
lambda u: u.is_authenticated(),
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
def permission_required(perm, login_url=None):
"""
Decorator for views that checks whether a user has a particular permission
enabled, redirecting to the log-in page if necessary.
"""
return user_passes_test(lambda u: u.has_perm(perm), login_url=login_url)
|
bsd-3-clause
|
artful-addict/rwd-starter-kit
|
node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
|
960
|
45344
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Notes:
#
# This generates makefiles suitable for inclusion into the Android build system
# via an Android.mk file. It is based on make.py, the standard makefile
# generator.
#
# The code below generates a separate .mk file for each target, but
# all are sourced by the top-level GypAndroid.mk. This means that all
# variables in .mk-files clobber one another, and furthermore that any
# variables set potentially clash with other Android build system variables.
# Try to avoid setting global variables where possible.
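# Hedged sketch of the per-target Android.mk fragment this generator emits
# (module name and class below are hypothetical; AndroidMkWriter.Write below
# is the authoritative source for the exact content and order):
#
#   # This file is generated by gyp; do not edit.
#   include $(CLEAR_VARS)
#   LOCAL_MODULE_CLASS := STATIC_LIBRARIES
#   LOCAL_MODULE := my_library_gyp
#   LOCAL_MODULE_SUFFIX := .a
#   LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) $(GYP_GENERATED_OUTPUTS)
#   ...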
import gyp
import gyp.common
import gyp.generator.make as make # Reuse global functions from make backend.
import os
import re
import subprocess
generator_default_variables = {
'OS': 'android',
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_SUFFIX': '.so',
'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)',
'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)',
'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)',
'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
'LIB_DIR': '$(obj).$(TOOLSET)',
'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
'RULE_INPUT_PATH': '$(RULE_SOURCES)',
'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)',
'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
}
# Make supports multiple toolsets
generator_supports_multiple_toolsets = True
# Generator-specific gyp specs.
generator_additional_non_configuration_keys = [
# Boolean to declare that this target does not want its name mangled.
'android_unmangled_name',
# Map of android build system variables to set.
'aosp_build_settings',
]
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
ALL_MODULES_FOOTER = """\
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
# all the included sub-makefiles. This is just here to clarify.
gyp_all_modules:
"""
header = """\
# This file is generated by gyp; do not edit.
"""
# Map gyp target types to Android module classes.
MODULE_CLASSES = {
'static_library': 'STATIC_LIBRARIES',
'shared_library': 'SHARED_LIBRARIES',
'executable': 'EXECUTABLES',
}
def IsCPPExtension(ext):
return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx'
def Sourceify(path):
"""Convert a path to its source directory form. The Android backend does not
support options.generator_output, so this function is a noop."""
return path
# Map from qualified target to path to output.
# For Android, the target of these maps is a tuple ('static', 'modulename'),
# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
# since we link by module.
target_outputs = {}
# Map from qualified target to any linkable output. A subset
# of target_outputs. E.g. when mybinary depends on liba, we want to
# include liba in the linker line; when otherbinary depends on
# mybinary, we just want to build mybinary first.
target_link_deps = {}
class AndroidMkWriter(object):
"""AndroidMkWriter packages up the writing of one target-specific Android.mk.
Its only real entry point is Write(), and is mostly used for namespacing.
"""
def __init__(self, android_top_dir):
self.android_top_dir = android_top_dir
def Write(self, qualified_target, relative_target, base_path, output_filename,
spec, configs, part_of_all, write_alias_target, sdk_version):
"""The main entry point: writes a .mk file for a single target.
Arguments:
qualified_target: target we're generating
relative_target: qualified target name relative to the root
base_path: path relative to source root we're building in, used to resolve
target-relative paths
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
write_alias_target: flag indicating whether to create short aliases for
this target
sdk_version: what to emit for LOCAL_SDK_VERSION in output
"""
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w')
self.fp.write(header)
self.qualified_target = qualified_target
self.relative_target = relative_target
self.path = base_path
self.target = spec['target_name']
self.type = spec['type']
self.toolset = spec['toolset']
deps, link_deps = self.ComputeDeps(spec)
# Some of the generation below can add extra output, sources, or
# link dependencies. All of the out params of the functions that
# follow use names like extra_foo.
extra_outputs = []
extra_sources = []
self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
self.android_module = self.ComputeAndroidModule(spec)
(self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
self.output = self.output_binary = self.ComputeOutput(spec)
# Standard header.
self.WriteLn('include $(CLEAR_VARS)\n')
# Module class and name.
self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
self.WriteLn('LOCAL_MODULE := ' + self.android_module)
# Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE.
# The library module classes fail if the stem is set. ComputeOutputParts
# makes sure that stem == modulename in these cases.
if self.android_stem != self.android_module:
self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
if self.toolset == 'host':
self.WriteLn('LOCAL_IS_HOST_MODULE := true')
self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)')
elif sdk_version > 0:
self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
'$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version)
# Grab output directories; needed for Actions and Rules.
if self.toolset == 'host':
self.WriteLn('gyp_intermediate_dir := '
'$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))')
else:
self.WriteLn('gyp_intermediate_dir := '
'$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
self.WriteLn('gyp_shared_intermediate_dir := '
'$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
self.WriteLn()
# List files this target depends on so that actions/rules/copies/sources
# can depend on the list.
# TODO: doesn't pull in things through transitive link deps; needed?
target_dependencies = [x[1] for x in deps if x[0] == 'path']
self.WriteLn('# Make sure our deps are built first.')
self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
local_pathify=True)
# Actions must come first, since they can generate more OBJs for use below.
if 'actions' in spec:
self.WriteActions(spec['actions'], extra_sources, extra_outputs)
# Rules must be early like actions.
if 'rules' in spec:
self.WriteRules(spec['rules'], extra_sources, extra_outputs)
if 'copies' in spec:
self.WriteCopies(spec['copies'], extra_outputs)
# GYP generated outputs.
self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)
# Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
# on both our dependency targets and our generated files.
self.WriteLn('# Make sure our deps and generated files are built first.')
self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) '
'$(GYP_GENERATED_OUTPUTS)')
self.WriteLn()
# Sources.
if spec.get('sources', []) or extra_sources:
self.WriteSources(spec, configs, extra_sources)
self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
write_alias_target)
# Update global list of target outputs, used in dependency tracking.
target_outputs[qualified_target] = ('path', self.output_binary)
# Update global list of link dependencies.
if self.type == 'static_library':
target_link_deps[qualified_target] = ('static', self.android_module)
elif self.type == 'shared_library':
target_link_deps[qualified_target] = ('shared', self.android_module)
self.fp.close()
return self.android_module
def WriteActions(self, actions, extra_sources, extra_outputs):
"""Write Makefile code for any 'actions' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
actions (used to make other pieces dependent on these
actions)
"""
for action in actions:
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
action['action_name']))
self.WriteLn('### Rules for action "%s":' % action['action_name'])
inputs = action['inputs']
outputs = action['outputs']
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set()
for out in outputs:
if not out.startswith('$'):
print ('WARNING: Action for target "%s" writes output to local path '
'"%s".' % (self.target, out))
dir = os.path.split(out)[0]
if dir:
dirs.add(dir)
if int(action.get('process_outputs_as_sources', False)):
extra_sources += outputs
# Prepare the actual command.
command = gyp.common.EncodePOSIXShellList(action['action'])
if 'message' in action:
quiet_cmd = 'Gyp action: %s ($@)' % action['message']
else:
quiet_cmd = 'Gyp action: %s ($@)' % name
if len(dirs) > 0:
command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
command = cd_action + command
# The makefile rules are all relative to the top dir, but the gyp actions
# are defined relative to their containing dir. This replaces the gyp_*
# variables for the action rule with an absolute version so that the
# output goes in the right place.
# Only write the gyp_* rules for the "primary" output (:1);
# it's superfluous for the "extra outputs", and this avoids accidentally
# writing duplicate dummy rules for those outputs.
main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
'$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# Android's envsetup.sh adds a number of directories to the path including
# the built host binary directory. This causes actions/rules invoked by
# gyp to sometimes use these instead of system versions, e.g. bison.
# The built host binaries may not be suitable, and can cause errors.
# So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
# set by envsetup.
self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
% main_output)
# Don't allow spaces in input/output filenames, but make an exception for
# filenames which start with '$(' since it's okay for there to be spaces
# inside of make function/macro invocations.
for input in inputs:
if not input.startswith('$(') and ' ' in input:
raise gyp.common.GypError(
'Action input filename "%s" in target %s contains a space' %
(input, self.target))
for output in outputs:
if not output.startswith('$(') and ' ' in output:
raise gyp.common.GypError(
'Action output filename "%s" in target %s contains a space' %
(output, self.target))
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
(main_output, ' '.join(map(self.LocalPathify, inputs))))
self.WriteLn('\t@echo "%s"' % quiet_cmd)
self.WriteLn('\t$(hide)%s\n' % command)
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))
extra_outputs += outputs
self.WriteLn()
self.WriteLn()
def WriteRules(self, rules, extra_sources, extra_outputs):
"""Write Makefile code for any 'rules' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
rules (used to make other pieces dependent on these rules)
"""
if len(rules) == 0:
return
for rule in rules:
if len(rule.get('rule_sources', [])) == 0:
continue
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
rule['rule_name']))
self.WriteLn('\n### Generated for rule "%s":' % name)
self.WriteLn('# "%s":' % rule)
inputs = rule.get('inputs')
for rule_source in rule.get('rule_sources', []):
(rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
(rule_source_root, rule_source_ext) = \
os.path.splitext(rule_source_basename)
outputs = [self.ExpandInputRoot(out, rule_source_root,
rule_source_dirname)
for out in rule['outputs']]
dirs = set()
for out in outputs:
if not out.startswith('$'):
print ('WARNING: Rule for target %s writes output to local path %s'
% (self.target, out))
dir = os.path.dirname(out)
if dir:
dirs.add(dir)
extra_outputs += outputs
if int(rule.get('process_outputs_as_sources', False)):
extra_sources.extend(outputs)
components = []
for component in rule['action']:
component = self.ExpandInputRoot(component, rule_source_root,
rule_source_dirname)
if '$(RULE_SOURCES)' in component:
component = component.replace('$(RULE_SOURCES)',
rule_source)
components.append(component)
command = gyp.common.EncodePOSIXShellList(components)
cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
command = cd_action + command
if dirs:
command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
# We set up a rule to build the first output, and then set up
# a rule for each additional output to depend on the first.
outputs = map(self.LocalPathify, outputs)
main_output = outputs[0]
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
'$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# See explanation in WriteActions.
self.WriteLn('%s: export PATH := '
'$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)
main_output_deps = self.LocalPathify(rule_source)
if inputs:
main_output_deps += ' '
main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
(main_output, main_output_deps))
self.WriteLn('\t%s\n' % command)
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn('%s: %s ;' % (output, main_output))
self.WriteLn()
self.WriteLn()
def WriteCopies(self, copies, extra_outputs):
"""Write Makefile code for any 'copies' from the gyp input.
extra_outputs: a list that will be filled in with any outputs of this action
(used to make other pieces dependent on this action)
"""
self.WriteLn('### Generated for copy rule.')
variable = make.StringToMakefileVariable(self.relative_target + '_copies')
outputs = []
for copy in copies:
for path in copy['files']:
# The Android build system does not allow generation of files into the
# source tree. The destination should start with a variable, which will
# typically be $(gyp_intermediate_dir) or
# $(gyp_shared_intermediate_dir). Note that we can't use an assertion
# because some of the gyp tests depend on this.
if not copy['destination'].startswith('$'):
print ('WARNING: Copy rule for target %s writes output to '
'local path %s' % (self.target, copy['destination']))
# LocalPathify() calls normpath, stripping trailing slashes.
path = Sourceify(self.LocalPathify(path))
filename = os.path.split(path)[1]
output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
filename)))
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
(output, path))
self.WriteLn('\t@echo Copying: $@')
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
self.WriteLn()
outputs.append(output)
self.WriteLn('%s = %s' % (variable,
' '.join(map(make.QuoteSpaces, outputs))))
extra_outputs.append('$(%s)' % variable)
self.WriteLn()
def WriteSourceFlags(self, spec, configs):
"""Write out the flags and include paths used to compile source files for
the current target.
Args:
spec, configs: input from gyp.
"""
for configname, config in sorted(configs.iteritems()):
extracted_includes = []
self.WriteLn('\n# Flags passed to both C and C++ files.')
cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
config.get('cflags', []) + config.get('cflags_c', []))
extracted_includes.extend(includes_from_cflags)
self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)
self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
prefix='-D', quoter=make.EscapeCppDefine)
self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
includes = list(config.get('include_dirs', []))
includes.extend(extracted_includes)
includes = map(Sourceify, map(self.LocalPathify, includes))
includes = self.NormalizeIncludePaths(includes)
self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)
self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)
self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
'$(MY_DEFS_$(GYP_CONFIGURATION))')
# Undefine ANDROID for host modules
# TODO: the source code should not use macro ANDROID to tell if it's host
# or target module.
if self.toolset == 'host':
self.WriteLn('# Undefine ANDROID for host modules')
self.WriteLn('LOCAL_CFLAGS += -UANDROID')
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
'$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
# Android uses separate flags for assembly file invocations, but gyp expects
# the same CFLAGS to be applied:
self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
def WriteSources(self, spec, configs, extra_sources):
"""Write Makefile code for any 'sources' from the gyp input.
These are source files necessary to build the current target.
We need to handle shared_intermediate directory source files as
a special case by copying them to the intermediate directory and
treating them as generated sources. Otherwise the Android build
rules won't pick them up.
Args:
spec, configs: input from gyp.
extra_sources: Sources generated from Actions or Rules.
"""
sources = filter(make.Compilable, spec.get('sources', []))
generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
extra_sources = filter(make.Compilable, extra_sources)
# Determine and output the C++ extension used by these sources.
# We simply find the first C++ file and use that extension.
all_sources = sources + extra_sources
local_cpp_extension = '.cpp'
for source in all_sources:
(root, ext) = os.path.splitext(source)
if IsCPPExtension(ext):
local_cpp_extension = ext
break
if local_cpp_extension != '.cpp':
self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)
# We need to move any non-generated sources that are coming from the
# shared intermediate directory out of LOCAL_SRC_FILES and put them
# into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
# that don't match our local_cpp_extension, since Android will only
# generate Makefile rules for a single LOCAL_CPP_EXTENSION.
local_files = []
for source in sources:
(root, ext) = os.path.splitext(source)
if '$(gyp_shared_intermediate_dir)' in source:
extra_sources.append(source)
elif '$(gyp_intermediate_dir)' in source:
extra_sources.append(source)
elif IsCPPExtension(ext) and ext != local_cpp_extension:
extra_sources.append(source)
else:
local_files.append(os.path.normpath(os.path.join(self.path, source)))
# For any generated source, if it is coming from the shared intermediate
# directory then we add a Make rule to copy them to the local intermediate
# directory first. This is because the Android LOCAL_GENERATED_SOURCES
# must be in the local module intermediate directory for the compile rules
# to work properly. If the file has the wrong C++ extension, then we add
# a rule to copy that to intermediates and use the new version.
final_generated_sources = []
# If a source file gets copied, we still need to add the original source
# directory as a header search path, since GCC searches for headers in the
# directory that contains the source file by default.
origin_src_dirs = []
for source in extra_sources:
local_file = source
if not '$(gyp_intermediate_dir)/' in local_file:
basename = os.path.basename(local_file)
local_file = '$(gyp_intermediate_dir)/' + basename
(root, ext) = os.path.splitext(local_file)
if IsCPPExtension(ext) and ext != local_cpp_extension:
local_file = root + local_cpp_extension
if local_file != source:
self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
self.WriteLn('\tmkdir -p $(@D); cp $< $@')
origin_src_dirs.append(os.path.dirname(source))
final_generated_sources.append(local_file)
# We add back in all of the non-compilable stuff to make sure that the
# make rules have dependencies on them.
final_generated_sources.extend(generated_not_sources)
self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')
origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')
self.WriteList(local_files, 'LOCAL_SRC_FILES')
# Write out the flags used to compile the source; this must be done last
# so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
self.WriteSourceFlags(spec, configs)
def ComputeAndroidModule(self, spec):
"""Return the Android module name used for a gyp spec.
We use the complete qualified target name to avoid collisions between
duplicate targets in different directories. We also add a suffix to
distinguish gyp-generated module names.
"""
if int(spec.get('android_unmangled_name', 0)):
assert self.type != 'shared_library' or self.target.startswith('lib')
return self.target
if self.type == 'shared_library':
# For reasons of convention, the Android build system requires that all
# shared library modules are named 'libfoo' when generating -l flags.
prefix = 'lib_'
else:
prefix = ''
if spec['toolset'] == 'host':
suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
else:
suffix = '_gyp'
if self.path:
middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
else:
middle = make.StringToMakefileVariable(self.target)
return ''.join([prefix, middle, suffix])
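# Illustrative example (hypothetical values, not from the original source):
# with path == 'foo/bar', target == 'baz' and the 'target' toolset, this
# returns 'foo_bar_baz_gyp'; a shared_library gets a 'lib_' prefix and the
# 'host' toolset swaps the suffix for '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'.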
def ComputeOutputParts(self, spec):
"""Return the 'output basename' of a gyp spec, split into filename + ext.
Android libraries must be named the same thing as their module name,
otherwise the linker can't find them, so product_name and so on must be
ignored if we are building a library, and the "lib" prepending is
not done for Android.
"""
assert self.type != 'loadable_module' # TODO: not supported?
target = spec['target_name']
target_prefix = ''
target_ext = ''
if self.type == 'static_library':
target = self.ComputeAndroidModule(spec)
target_ext = '.a'
elif self.type == 'shared_library':
target = self.ComputeAndroidModule(spec)
target_ext = '.so'
elif self.type == 'none':
target_ext = '.stamp'
elif self.type != 'executable':
print ("ERROR: What output file should be generated?",
"type", self.type, "target", target)
if self.type != 'static_library' and self.type != 'shared_library':
target_prefix = spec.get('product_prefix', target_prefix)
target = spec.get('product_name', target)
product_ext = spec.get('product_extension')
if product_ext:
target_ext = '.' + product_ext
target_stem = target_prefix + target
return (target_stem, target_ext)
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
return ''.join(self.ComputeOutputParts(spec))
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
if self.type == 'executable':
# We install host executables into shared_intermediate_dir so they can be
# run by gyp rules that refer to PRODUCT_DIR.
path = '$(gyp_shared_intermediate_dir)'
elif self.type == 'shared_library':
if self.toolset == 'host':
path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)'
else:
path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
else:
# Other targets just get built into their intermediate dir.
if self.toolset == 'host':
path = ('$(call intermediates-dir-for,%s,%s,true,,'
'$(GYP_HOST_VAR_PREFIX))' % (self.android_class,
self.android_module))
else:
path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
% (self.android_class, self.android_module))
assert spec.get('product_dir') is None # TODO: not supported?
return os.path.join(path, self.ComputeOutputBasename(spec))
def NormalizeIncludePaths(self, include_paths):
""" Normalize include_paths.
Convert absolute paths to relative to the Android top directory.
Args:
include_paths: A list of unprocessed include paths.
Returns:
A list of normalized include paths.
"""
normalized = []
for path in include_paths:
if path[0] == '/':
path = gyp.common.RelativePath(path, self.android_top_dir)
normalized.append(path)
return normalized
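# Illustrative example (hypothetical paths): with android_top_dir == '/android',
# NormalizeIncludePaths(['/android/external/zlib', 'include']) is expected to
# return ['external/zlib', 'include'] -- absolute paths become relative to the
# Android top directory, relative paths pass through unchanged.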
def ExtractIncludesFromCFlags(self, cflags):
"""Extract includes "-I..." out from cflags
Args:
cflags: A list of compiler flags, which may be mixed with "-I.."
Returns:
A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
"""
clean_cflags = []
include_paths = []
for flag in cflags:
if flag.startswith('-I'):
include_paths.append(flag[2:])
else:
clean_cflags.append(flag)
return (clean_cflags, include_paths)
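# Illustrative example (made-up flags): ExtractIncludesFromCFlags(
# ['-O2', '-Iexternal/icu', '-Wall']) is expected to return
# (['-O2', '-Wall'], ['external/icu']).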
def FilterLibraries(self, libraries):
"""Filter the 'libraries' key to separate things that shouldn't be ldflags.
Library entries that look like filenames should be converted to android
module names instead of being passed to the linker as flags.
Args:
libraries: the value of spec.get('libraries')
Returns:
A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
"""
static_lib_modules = []
dynamic_lib_modules = []
ldflags = []
for libs in libraries:
# Libs can have multiple words.
for lib in libs.split():
# Filter the system libraries, which are added by default by the Android
# build system.
if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or
lib.endswith('libgcc.a')):
continue
match = re.search(r'([^/]+)\.a$', lib)
if match:
static_lib_modules.append(match.group(1))
continue
match = re.search(r'([^/]+)\.so$', lib)
if match:
dynamic_lib_modules.append(match.group(1))
continue
if lib.startswith('-l'):
ldflags.append(lib)
return (static_lib_modules, dynamic_lib_modules, ldflags)
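# Illustrative example (made-up library list): FilterLibraries(
# ['-lm foo/libssl.a bar/libz.so -ldl']) would drop '-lm' (a default system
# library), return 'libssl' as a static module and 'libz' as a shared module,
# and pass '-ldl' through as an ldflag.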
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
deps = []
link_deps = []
if 'dependencies' in spec:
deps.extend([target_outputs[dep] for dep in spec['dependencies']
if target_outputs[dep]])
for dep in spec['dependencies']:
if dep in target_link_deps:
link_deps.append(target_link_deps[dep])
deps.extend(link_deps)
return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
def WriteTargetFlags(self, spec, configs, link_deps):
"""Write Makefile code to specify the link flags and library dependencies.
spec, configs: input from gyp.
link_deps: link dependency list; see ComputeDeps()
"""
# Libraries (i.e. -lfoo)
# These must be included even for static libraries as some of them provide
# implicit include paths through the build system.
libraries = gyp.common.uniquer(spec.get('libraries', []))
static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)
if self.type != 'static_library':
for configname, config in sorted(configs.iteritems()):
ldflags = list(config.get('ldflags', []))
self.WriteLn('')
self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS')
self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) '
'$(LOCAL_GYP_LIBS)')
# Link dependencies (i.e. other gyp targets this target depends on)
# These need not be included for static libraries as within the gyp build
# we do not use the implicit include path mechanism.
if self.type != 'static_library':
static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
else:
static_link_deps = []
shared_link_deps = []
# Only write the lists if they are non-empty.
if static_libs or static_link_deps:
self.WriteLn('')
self.WriteList(static_libs + static_link_deps,
'LOCAL_STATIC_LIBRARIES')
self.WriteLn('# Enable grouping to fix circular references')
self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
if dynamic_libs or shared_link_deps:
self.WriteLn('')
self.WriteList(dynamic_libs + shared_link_deps,
'LOCAL_SHARED_LIBRARIES')
def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
write_alias_target):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
part_of_all: flag indicating this target is part of 'all'
write_alias_target: flag indicating whether to create short aliases for this
target
"""
self.WriteLn('### Rules for final target.')
if self.type != 'none':
self.WriteTargetFlags(spec, configs, link_deps)
settings = spec.get('aosp_build_settings', {})
if settings:
self.WriteLn('### Set directly by aosp_build_settings.')
for k, v in settings.iteritems():
if isinstance(v, list):
self.WriteList(v, k)
else:
self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v)))
self.WriteLn('')
# Add to the set of targets which represent the gyp 'all' target. We use the
# name 'gyp_all_modules' as the Android build system doesn't allow the use
# of the Make target 'all' and because 'all_modules' is the equivalent of
# the Make target 'all' on Android.
if part_of_all and write_alias_target:
self.WriteLn('# Add target alias to "gyp_all_modules" target.')
self.WriteLn('.PHONY: gyp_all_modules')
self.WriteLn('gyp_all_modules: %s' % self.android_module)
self.WriteLn('')
# Add an alias from the gyp target name to the Android module name. This
# simplifies manual builds of the target, and is required by the test
# framework.
if self.target != self.android_module and write_alias_target:
self.WriteLn('# Alias gyp target name.')
self.WriteLn('.PHONY: %s' % self.target)
self.WriteLn('%s: %s' % (self.target, self.android_module))
self.WriteLn('')
# Add the command to trigger build of the target type depending
# on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
# NOTE: This has to come last!
modifier = ''
if self.toolset == 'host':
modifier = 'HOST_'
if self.type == 'static_library':
self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)
elif self.type == 'shared_library':
self.WriteLn('LOCAL_PRELINK_MODULE := false')
self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
elif self.type == 'executable':
self.WriteLn('LOCAL_CXX_STL := libc++_static')
# Executables are for build and test purposes only, so they're installed
# to a directory that doesn't get included in the system image.
self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
else:
self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
if self.toolset == 'target':
self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
else:
self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)')
self.WriteLn()
self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
self.WriteLn()
self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)')
self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
self.WriteLn('\t$(hide) touch $@')
self.WriteLn()
self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
def WriteList(self, value_list, variable=None, prefix='',
quoter=make.QuoteIfNecessary, local_pathify=False):
"""Write a variable definition that is a list of values.
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
foo = blaha blahb
but in a pretty-printed style.
"""
values = ''
if value_list:
value_list = [quoter(prefix + l) for l in value_list]
if local_pathify:
value_list = [self.LocalPathify(l) for l in value_list]
values = ' \\\n\t' + ' \\\n\t'.join(value_list)
self.fp.write('%s :=%s\n\n' % (variable, values))
def WriteLn(self, text=''):
self.fp.write(text + '\n')
def LocalPathify(self, path):
"""Convert a subdirectory-relative path into a normalized path which starts
with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
Absolute paths, or paths that contain variables, are just normalized."""
if '$(' in path or os.path.isabs(path):
# path is not a file in the project tree in this case, but calling
# normpath is still important for trimming trailing slashes.
return os.path.normpath(path)
local_path = os.path.join('$(LOCAL_PATH)', self.path, path)
local_path = os.path.normpath(local_path)
# Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
# - i.e. that the resulting path is still inside the project tree. The
# path may legitimately have ended up containing just $(LOCAL_PATH), though,
# so we don't look for a slash.
assert local_path.startswith('$(LOCAL_PATH)'), (
'Path %s attempts to escape from gyp path %s!' % (path, self.path))
return local_path
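# Illustrative example (hypothetical values): with self.path == 'foo/bar',
# LocalPathify('baz/../qux.c') -> '$(LOCAL_PATH)/foo/bar/qux.c'; absolute
# paths and paths containing '$(' are only normalized, not prefixed.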
def ExpandInputRoot(self, template, expansion, dirname):
if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
return template
path = template % {
'INPUT_ROOT': expansion,
'INPUT_DIRNAME': dirname,
}
return os.path.normpath(path)
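# Illustrative example (made-up template): ExpandInputRoot('%(INPUT_ROOT)s.cc',
# 'icu', 'third_party') -> 'icu.cc'; templates without %(INPUT_ROOT)s or
# %(INPUT_DIRNAME)s are returned untouched.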
def PerformBuild(data, configurations, params):
# The android backend only supports the default configuration.
options = params['options']
makefile = os.path.abspath(os.path.join(options.toplevel_dir,
'GypAndroid.mk'))
env = dict(os.environ)
env['ONE_SHOT_MAKEFILE'] = makefile
arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
print 'Building: %s' % arguments
subprocess.check_call(arguments, env=env)
def GenerateOutput(target_list, target_dicts, data, params):
options = params['options']
generator_flags = params.get('generator_flags', {})
builddir_name = generator_flags.get('output_dir', 'out')
limit_to_target_all = generator_flags.get('limit_to_target_all', False)
write_alias_targets = generator_flags.get('write_alias_targets', True)
sdk_version = generator_flags.get('aosp_sdk_version', 0)
android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
def CalculateMakefilePath(build_file, base_name):
"""Determine where to write a Makefile for a given gyp file."""
# Paths in gyp files are relative to the .gyp file, but we want
# paths relative to the source root for the master makefile. Grab
# the path of the .gyp file as the base to relativize against.
# E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
base_path = gyp.common.RelativePath(os.path.dirname(build_file),
options.depth)
# We write the file in the base_path directory.
output_file = os.path.join(options.depth, base_path, base_name)
assert not options.generator_output, (
'The Android backend does not support options.generator_output.')
base_path = gyp.common.RelativePath(os.path.dirname(build_file),
options.toplevel_dir)
return base_path, output_file
# TODO: search for the first non-'Default' target. This can go
# away when we add verification that all targets have the
# necessary configurations.
default_configuration = None
toolsets = set([target_dicts[target]['toolset'] for target in target_list])
for target in target_list:
spec = target_dicts[target]
if spec['default_configuration'] != 'Default':
default_configuration = spec['default_configuration']
break
if not default_configuration:
default_configuration = 'Default'
srcdir = '.'
makefile_name = 'GypAndroid' + options.suffix + '.mk'
makefile_path = os.path.join(options.toplevel_dir, makefile_name)
assert not options.generator_output, (
'The Android backend does not support options.generator_output.')
gyp.common.EnsureDirExists(makefile_path)
root_makefile = open(makefile_path, 'w')
root_makefile.write(header)
# We set LOCAL_PATH just once, here, to the top of the project tree. This
# allows all the other paths we use to be relative to the Android.mk file,
# as the Android build system expects.
root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n')
# Find the list of targets that derive from the gyp file(s) being built.
needed_targets = set()
for build_file in params['build_files']:
for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
needed_targets.add(target)
build_files = set()
include_list = set()
android_modules = {}
for qualified_target in target_list:
build_file, target, toolset = gyp.common.ParseQualifiedTarget(
qualified_target)
relative_build_file = gyp.common.RelativePath(build_file,
options.toplevel_dir)
build_files.add(relative_build_file)
included_files = data[build_file]['included_files']
for included_file in included_files:
# The included_files entries are relative to the dir of the build file
# that included them, so we have to undo that and then make them relative
# to the root dir.
relative_include_file = gyp.common.RelativePath(
gyp.common.UnrelativePath(included_file, build_file),
options.toplevel_dir)
abs_include_file = os.path.abspath(relative_include_file)
# If the include file is from the ~/.gyp dir, we should use absolute path
# so that relocating the src dir doesn't break the path.
if (params['home_dot_gyp'] and
abs_include_file.startswith(params['home_dot_gyp'])):
build_files.add(abs_include_file)
else:
build_files.add(relative_include_file)
base_path, output_file = CalculateMakefilePath(build_file,
target + '.' + toolset + options.suffix + '.mk')
spec = target_dicts[qualified_target]
configs = spec['configurations']
part_of_all = qualified_target in needed_targets
if limit_to_target_all and not part_of_all:
continue
relative_target = gyp.common.QualifiedTarget(relative_build_file, target,
toolset)
writer = AndroidMkWriter(android_top_dir)
android_module = writer.Write(qualified_target, relative_target, base_path,
output_file, spec, configs,
part_of_all=part_of_all,
write_alias_target=write_alias_targets,
sdk_version=sdk_version)
if android_module in android_modules:
print ('ERROR: Android module names must be unique. The following '
'targets both generate Android module name %s.\n %s\n %s' %
(android_module, android_modules[android_module],
qualified_target))
return
android_modules[android_module] = qualified_target
# Our root_makefile lives at the source root. Compute the relative path
# from there to the output_file for including.
mkfile_rel_path = gyp.common.RelativePath(output_file,
os.path.dirname(makefile_path))
include_list.add(mkfile_rel_path)
root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
root_makefile.write('GYP_VAR_PREFIX ?=\n')
root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n')
root_makefile.write('GYP_HOST_MULTILIB ?= first\n')
# Write out the sorted list of includes.
root_makefile.write('\n')
for include_file in sorted(include_list):
root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
root_makefile.write('\n')
if write_alias_targets:
root_makefile.write(ALL_MODULES_FOOTER)
root_makefile.close()
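# Rough usage sketch (for illustration only; gyp normally drives this generator
# itself): gyp calls GenerateOutput() with the flattened target list, which
# writes one '<target>.<toolset>.mk' fragment per target via AndroidMkWriter
# and a root 'GypAndroid.mk' that sets GYP_CONFIGURATION defaults and includes
# all of the fragments.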
|
mit
|
anaran/olympia
|
apps/amo/helpers.py
|
1
|
20802
|
import collections
import json as jsonlib
import os
import random
import re
from operator import attrgetter
from urlparse import urljoin
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.forms import CheckboxInput
from django.utils import translation
from django.utils.encoding import smart_unicode
from django.template import defaultfilters
import caching.base as caching
import jinja2
import six
from babel.support import Format
from jingo import register, env
# Needed to make sure our own |f filter overrides jingo's one.
from jingo import helpers # noqa
from jingo_minify.helpers import (_build_html, _get_compiled_css_url, get_path,
is_external)
from tower import ugettext as _, strip_whitespace
import amo
from amo import utils, urlresolvers
from constants.licenses import PERSONA_LICENSES_IDS
from translations.query import order_by_translation
from translations.helpers import truncate
# Yanking filters from Django.
register.filter(defaultfilters.slugify)
# Registering some utils as filters:
urlparams = register.filter(utils.urlparams)
register.filter(utils.epoch)
register.filter(utils.isotime)
register.function(dict)
register.function(utils.randslice)
@register.filter
def link(item):
html = """<a href="%s">%s</a>""" % (item.get_url_path(),
jinja2.escape(item.name))
return jinja2.Markup(html)
@register.filter
def xssafe(value):
"""
Like |safe but for strings with interpolation.
By using |xssafe you assert that you have written tests proving an
XSS can't happen here.
"""
return jinja2.Markup(value)
@register.filter
def babel_datetime(dt, format='medium'):
return _get_format().datetime(dt, format=format) if dt else ''
@register.filter
def babel_date(date, format='medium'):
return _get_format().date(date, format=format) if date else ''
@register.function
def locale_url(url):
"""Take a URL and give it the locale prefix."""
prefixer = urlresolvers.get_url_prefix()
script = prefixer.request.META['SCRIPT_NAME']
parts = [script, prefixer.locale, url.lstrip('/')]
return '/'.join(parts)
@register.inclusion_tag('includes/refinements.html')
@jinja2.contextfunction
def refinements(context, items, title, thing):
d = dict(context.items())
d.update(items=items, title=title, thing=thing)
return d
@register.function
def url(viewname, *args, **kwargs):
"""Helper for Django's ``reverse`` in templates."""
add_prefix = kwargs.pop('add_prefix', True)
host = kwargs.pop('host', '')
src = kwargs.pop('src', '')
url = '%s%s' % (host, urlresolvers.reverse(viewname,
args=args,
kwargs=kwargs,
add_prefix=add_prefix))
if src:
url = urlparams(url, src=src)
return url
@register.function
def shared_url(viewname, addon, *args, **kwargs):
"""
Helper specifically for addons or apps to get urls. Requires
the viewname, addon (or app). It's assumed that we'll pass the
slug into the args and we'll look up the right slug (addon or app)
for you.
Viewname should be a normal view eg: `addons.details` or `apps.details`.
`addons.details` becomes `apps.details`, if we've passed an app, etc.
A viewname such as `details` becomes `addons.details` or `apps.details`,
depending on the add-on type.
"""
namespace, dot, latter = viewname.partition('.')
# If `viewname` is prefixed with `addons.` but we're linking to a
# webapp, the `viewname` magically gets prefixed with `apps.`.
if namespace in ('addons', 'apps'):
viewname = latter
# Otherwise, we just slap the appropriate prefix in front of `viewname`.
viewname = '.'.join(['addons', viewname])
return url(viewname, *([addon.slug] + list(args)), **kwargs)
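# Illustrative note (hypothetical add-on object): as the code stands here, both
# shared_url('details', addon) and shared_url('addons.details', addon) end up
# reversing 'addons.details' with addon.slug as the first positional argument.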
@register.function
def services_url(viewname, *args, **kwargs):
"""Helper for ``url`` with host=SERVICES_URL."""
kwargs.update({'host': settings.SERVICES_URL})
return url(viewname, *args, **kwargs)
@register.filter
def paginator(pager):
return Paginator(pager).render()
@register.filter
def impala_paginator(pager):
t = env.get_template('amo/impala/paginator.html')
return jinja2.Markup(t.render({'pager': pager}))
@register.filter
def mobile_paginator(pager):
t = env.get_template('amo/mobile/paginator.html')
return jinja2.Markup(t.render({'pager': pager}))
@register.filter
def mobile_impala_paginator(pager):
# Impala-style paginator that is easier to mobilefy.
t = env.get_template('amo/mobile/impala_paginator.html')
return jinja2.Markup(t.render({'pager': pager}))
@register.function
def is_mobile(app):
return app == amo.MOBILE
@register.function
def sidebar(app):
"""Populates the sidebar with (categories, types)."""
from addons.models import Category
if app is None:
return [], []
# We muck with query to make order_by and extra_order_by play nice.
q = Category.objects.filter(application=app.id, weight__gte=0,
type=amo.ADDON_EXTENSION)
categories = order_by_translation(q, 'name')
categories.query.extra_order_by.insert(0, 'weight')
Type = collections.namedtuple('Type', 'id name url')
base = urlresolvers.reverse('home')
types = [Type(99, _('Collections'), base + 'collections/')]
shown_types = {
amo.ADDON_PERSONA: urlresolvers.reverse('browse.personas'),
amo.ADDON_DICT: urlresolvers.reverse('browse.language-tools'),
amo.ADDON_SEARCH: urlresolvers.reverse('browse.search-tools'),
amo.ADDON_THEME: urlresolvers.reverse('browse.themes'),
}
titles = dict(amo.ADDON_TYPES,
**{amo.ADDON_DICT: _('Dictionaries & Language Packs')})
for type_, url in shown_types.items():
if type_ in app.types:
types.append(Type(type_, titles[type_], url))
return categories, sorted(types, key=lambda x: x.name)
class Paginator(object):
def __init__(self, pager):
self.pager = pager
self.max = 10
self.span = (self.max - 1) / 2
self.page = pager.number
self.num_pages = pager.paginator.num_pages
self.count = pager.paginator.count
pager.page_range = self.range()
pager.dotted_upper = self.num_pages not in pager.page_range
pager.dotted_lower = 1 not in pager.page_range
def range(self):
"""Return a list of page numbers to show in the paginator."""
page, total, span = self.page, self.num_pages, self.span
if total < self.max:
lower, upper = 0, total
elif page < span + 1:
lower, upper = 0, span * 2
elif page > total - span:
lower, upper = total - span * 2, total
else:
lower, upper = page - span, page + span - 1
return range(max(lower + 1, 1), min(total, upper) + 1)
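# Illustrative example (made-up pager): with the defaults above (max=10,
# span=4), page 7 of 50 yields [4, 5, 6, 7, 8, 9, 10], page 2 of 50 yields
# [1, 2, 3, 4, 5, 6, 7, 8], and fewer than 10 total pages yields all of them.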
def render(self):
c = {'pager': self.pager, 'num_pages': self.num_pages,
'count': self.count}
t = env.get_template('amo/paginator.html').render(c)
return jinja2.Markup(t)
def _get_format():
lang = translation.get_language()
return Format(utils.get_locale_from_lang(lang))
@register.filter
def numberfmt(num, format=None):
return _get_format().decimal(num, format)
@register.filter
def currencyfmt(num, currency):
if num is None:
return ''
return _get_format().currency(num, currency)
def page_name(app=None):
"""Determine the correct page name for the given app (or no app)."""
if app:
return _(u'Add-ons for {0}').format(app.pretty)
else:
return _('Add-ons')
@register.function
@jinja2.contextfunction
def login_link(context):
next = context['request'].path
qs = context['request'].GET.urlencode()
if qs:
next += '?' + qs
l = urlparams(urlresolvers.reverse('users.login'), to=next)
return l
@register.function
@jinja2.contextfunction
def page_title(context, title):
title = smart_unicode(title)
base_title = page_name(context['request'].APP)
return u'%s :: %s' % (title, base_title)
@register.function
@jinja2.contextfunction
def breadcrumbs(context, items=list(), add_default=True, crumb_size=40):
"""
Show a list of breadcrumbs. If url is None, it won't be a link.
Accepts: [(url, label)]
"""
if add_default:
app = context['request'].APP
crumbs = [(urlresolvers.reverse('home'), page_name(app))]
else:
crumbs = []
# add user-defined breadcrumbs
if items:
try:
crumbs += items
except TypeError:
crumbs.append(items)
crumbs = [(url, truncate(label, crumb_size)) for (url, label) in crumbs]
c = {'breadcrumbs': crumbs}
t = env.get_template('amo/breadcrumbs.html').render(c)
return jinja2.Markup(t)
@register.function
@jinja2.contextfunction
def impala_breadcrumbs(context, items=list(), add_default=True, crumb_size=40):
"""
Show a list of breadcrumbs. If url is None, it won't be a link.
Accepts: [(url, label)]
"""
if add_default:
base_title = page_name(context['request'].APP)
crumbs = [(urlresolvers.reverse('home'), base_title)]
else:
crumbs = []
# add user-defined breadcrumbs
if items:
try:
crumbs += items
except TypeError:
crumbs.append(items)
crumbs = [(url, truncate(label, crumb_size)) for (url, label) in crumbs]
c = {'breadcrumbs': crumbs, 'has_home': add_default}
t = env.get_template('amo/impala/breadcrumbs.html').render(c)
return jinja2.Markup(t)
@register.filter
def json(s):
return jsonlib.dumps(s)
@register.filter
def absolutify(url, site=None):
"""Takes a URL and prepends the SITE_URL"""
if url.startswith('http'):
return url
else:
return urljoin(site or settings.SITE_URL, url)
@register.filter
def strip_controls(s):
"""
Strips control characters from a string.
"""
# Translation table of control characters.
control_trans = dict((n, None) for n in xrange(32) if n not in [10, 13])
rv = unicode(s).translate(control_trans)
return jinja2.Markup(rv) if isinstance(s, jinja2.Markup) else rv
@register.filter
def strip_html(s, just_kidding=False):
"""Strips HTML. Confirm lets us opt out easily."""
if just_kidding:
return s
if not s:
return ''
else:
s = re.sub(r'<.*?>', '', smart_unicode(s, errors='ignore'))
return re.sub(r'<.*?>', '', s)
@register.filter
def external_url(url):
"""Bounce a URL off outgoing.mozilla.org."""
return urlresolvers.get_outgoing_url(unicode(url))
@register.filter
def shuffle(sequence):
"""Shuffle a sequence."""
random.shuffle(sequence)
return sequence
@register.function
def license_link(license):
"""Link to a code license, including icon where applicable."""
# If passed in an integer, try to look up the License.
from versions.models import License
if isinstance(license, (long, int)):
if license in PERSONA_LICENSES_IDS:
# Grab built-in license.
license = PERSONA_LICENSES_IDS[license]
else:
# Grab custom license.
license = License.objects.filter(id=license)
if not license.exists():
return ''
license = license[0]
elif not license:
return ''
if not getattr(license, 'builtin', True):
return _('Custom License')
t = env.get_template('amo/license_link.html').render({'license': license})
return jinja2.Markup(t)
@register.function
def field(field, label=None, **attrs):
if label is not None:
field.label = label
# HTML from Django is already escaped.
return jinja2.Markup(u'%s<p>%s%s</p>' %
(field.errors, field.label_tag(),
field.as_widget(attrs=attrs)))
@register.inclusion_tag('amo/category-arrow.html')
@jinja2.contextfunction
def category_arrow(context, key, prefix):
d = dict(context.items())
d.update(key=key, prefix=prefix)
return d
@register.filter
def timesince(time):
if not time:
return u''
ago = defaultfilters.timesince(time)
# L10n: relative time in the past, like '4 days ago'
return _(u'{0} ago').format(ago)
@register.inclusion_tag('amo/recaptcha.html')
@jinja2.contextfunction
def recaptcha(context, form):
d = dict(context.items())
d.update(form=form)
return d
@register.filter
def is_choice_field(value):
try:
return isinstance(value.field.widget, CheckboxInput)
except AttributeError:
pass
@register.inclusion_tag('amo/mobile/sort_by.html')
def mobile_sort_by(base_url, options=None, selected=None, extra_sort_opts=None,
search_filter=None):
if search_filter:
selected = search_filter.field
options = search_filter.opts
if hasattr(search_filter, 'extras'):
options += search_filter.extras
if extra_sort_opts:
options_dict = dict(options + extra_sort_opts)
else:
options_dict = dict(options)
if selected in options_dict:
current = options_dict[selected]
else:
selected, current = options[0] # Default to the first option.
return locals()
@register.function
@jinja2.contextfunction
def cache_buster(context, url):
if 'BUILD_ID' in context:
build = context['BUILD_ID']
else:
if url.endswith('.js'):
build = context['BUILD_ID_JS']
elif url.endswith('.css'):
build = context['BUILD_ID_CSS']
else:
build = context['BUILD_ID_IMG']
return utils.urlparams(url, b=build)
@register.function
@jinja2.contextfunction
def media(context, url):
"""Get a MEDIA_URL link with a cache buster querystring."""
return urljoin(settings.MEDIA_URL, cache_buster(context, url))
@register.function
@jinja2.contextfunction
def static(context, url):
"""Get a STATIC_URL link with a cache buster querystring."""
return urljoin(settings.STATIC_URL, cache_buster(context, url))
@register.function
@jinja2.evalcontextfunction
def attrs(ctx, *args, **kw):
return jinja2.filters.do_xmlattr(ctx, dict(*args, **kw))
@register.function
@jinja2.contextfunction
def side_nav(context, addon_type, category=None):
app = context['request'].APP.id
cat = str(category.id) if category else 'all'
return caching.cached(lambda: _side_nav(context, addon_type, category),
'side-nav-%s-%s-%s' % (app, addon_type, cat))
def _side_nav(context, addon_type, cat):
# Prevent helpers from generating circular imports.
from addons.models import Category, AddonType
request = context['request']
qs = Category.objects.filter(weight__gte=0)
if addon_type != amo.ADDON_PERSONA:
qs = qs.filter(application=request.APP.id)
sort_key = attrgetter('weight', 'name')
categories = sorted(qs.filter(type=addon_type), key=sort_key)
if cat:
base_url = cat.get_url_path()
else:
base_url = AddonType(addon_type).get_url_path()
ctx = dict(request=request, base_url=base_url, categories=categories,
addon_type=addon_type, amo=amo)
return jinja2.Markup(env.get_template('amo/side_nav.html').render(ctx))
@register.function
@jinja2.contextfunction
def site_nav(context):
app = context['request'].APP.id
return caching.cached(lambda: _site_nav(context), 'site-nav-%s' % app)
def _site_nav(context):
# Prevent helpers from generating circular imports.
from addons.models import Category
request = context['request']
sorted_cats = lambda qs: sorted(qs, key=attrgetter('weight', 'name'))
extensions = Category.objects.filter(application=request.APP.id,
weight__gte=0, type=amo.ADDON_EXTENSION)
personas = Category.objects.filter(weight__gte=0, type=amo.ADDON_PERSONA)
ctx = dict(request=request, amo=amo,
extensions=sorted_cats(extensions),
personas=sorted_cats(personas))
return jinja2.Markup(env.get_template('amo/site_nav.html').render(ctx))
@register.function
def loc(s):
"""A noop function for strings that are not ready to be localized."""
return strip_whitespace(s)
@register.function
def site_event_type(type):
return amo.SITE_EVENT_CHOICES[type]
@register.function
@jinja2.contextfunction
def remora_url(context, url, lang=None, app=None, prefix=''):
"""Wrapper for urlresolvers.remora_url"""
if lang is None:
_lang = context['LANG']
if _lang:
lang = translation.to_locale(_lang).replace('_', '-')
if app is None:
try:
app = context['APP'].short
except (AttributeError, KeyError):
pass
return urlresolvers.remora_url(url=url, lang=lang, app=app, prefix=prefix)
@register.function
@jinja2.contextfunction
def hasOneToOne(context, obj, attr):
try:
getattr(obj, attr)
return True
except ObjectDoesNotExist:
return False
@register.function
def no_results_amo():
# This prints a "No results found" message. That's all. Carry on.
t = env.get_template('amo/no_results.html').render()
return jinja2.Markup(t)
@register.filter
def f(string, *args, **kwargs):
"""This overrides jingo.helpers.f to convert input to unicode if needed.
This is needed because of
https://github.com/jbalogh/jingo/pull/54#issuecomment-36728948
"""
if not isinstance(string, six.text_type):
string = six.text_type(string)
return string.format(*args, **kwargs)
def _relative_to_absolute(url):
"""
Prepends relative URLs with STATIC_URL so the CSS referencing them can be inlined.
This function is intended to be used as the ``repl`` argument of
``re.sub``.
"""
url = url.group(1).strip('"\'')
if not url.startswith(('data:', 'http:', 'https:', '//')):
url = url.replace('../../', settings.STATIC_URL)
return 'url(%s)' % url
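# Illustrative example (hypothetical STATIC_URL): for a match whose group(1) is
# '"../../img/logo.png"', the quotes are stripped and the result is
# 'url(<STATIC_URL>img/logo.png)'; data:, http(s): and protocol-relative URLs
# are left untouched.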
@register.function
def inline_css(bundle, media=False, debug=None):
"""
If we are in debug mode, just output a single style tag for each css file.
If we are not in debug mode, return a style that contains bundle-min.css.
Forces a regular css() call for external URLs (no inline allowed).
Extracted from jingo-minify and re-registered, see:
https://github.com/jsocol/jingo-minify/pull/41
Added: turns relative links to absolute ones using STATIC_URL.
"""
if debug is None:
debug = getattr(settings, 'TEMPLATE_DEBUG', False)
if debug:
items = [_get_compiled_css_url(i)
for i in settings.MINIFY_BUNDLES['css'][bundle]]
else:
items = ['css/%s-min.css' % bundle]
if not media:
media = getattr(settings, 'CSS_MEDIA_DEFAULT', 'screen,projection,tv')
contents = []
for css in items:
if is_external(css):
return _build_html([css], '<link rel="stylesheet" media="%s" '
'href="%%s" />' % media)
with open(get_path(css), 'r') as f:
css_content = f.read()
css_parsed = re.sub(r'url\(([^)]*?)\)',
_relative_to_absolute,
css_content)
contents.append(css_parsed)
return _build_html(contents, '<style type="text/css" media="%s">%%s'
'</style>' % media)
# A (temporary?) copy of this is in services/utils.py. See bug 1055654.
def user_media_path(what):
"""Make it possible to override storage paths in settings.
By default, all storage paths are in the MEDIA_ROOT.
This is backwards compatible.
"""
default = os.path.join(settings.MEDIA_ROOT, what)
key = "{0}_PATH".format(what.upper())
return getattr(settings, key, default)
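# Illustrative example (hypothetical setting): user_media_path('addons') returns
# settings.ADDONS_PATH when that setting exists, otherwise
# os.path.join(settings.MEDIA_ROOT, 'addons').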
# A (temporary?) copy of this is in services/utils.py. See bug 1055654.
def user_media_url(what):
"""
Generate the default media URL, and make it possible to override it from
settings.
"""
default = '%s%s/' % (settings.MEDIA_URL, what)
key = "{0}_URL".format(what.upper().replace('-', '_'))
return getattr(settings, key, default)
def id_to_path(pk):
"""
Generate a path from an id, to distribute folders in the file system.
1 => 1/1/1
12 => 2/12/12
123456 => 6/56/123456
"""
pk = str(pk)
path = [pk[-1]]
if len(pk) >= 2:
path.append(pk[-2:])
else:
path.append(pk)
path.append(pk)
return os.path.join(*path)
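# Minimal sanity-check sketch (added for illustration; not part of the original
# module). It only exercises id_to_path() with the values from its docstring
# and is never called at import time.
def _id_to_path_examples():
    """Return (pk, path) pairs, e.g. (123456, '6/56/123456')."""
    return [(pk, id_to_path(pk)) for pk in (1, 12, 123456)]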
|
bsd-3-clause
|
packetbeat/gopacket
|
layers/test_creator.py
|
84
|
3683
|
#!/usr/bin/python
# Copyright 2012 Google, Inc. All rights reserved.
"""TestCreator creates test templates from pcap files."""
import argparse
import base64
import glob
import re
import string
import subprocess
import sys
class Packet(object):
"""Helper class encapsulating packet from a pcap file."""
def __init__(self, packet_lines):
self.packet_lines = packet_lines
self.data = self._DecodeText(packet_lines)
@classmethod
def _DecodeText(cls, packet_lines):
packet_bytes = []
# First line is timestamp and stuff, skip it.
# Format: 0x0010: 0000 0020 3aff 3ffe 0000 0000 0000 0000 ....:.?.........
for line in packet_lines[1:]:
m = re.match(r'\s+0x[a-f\d]+:\s+((?:[\da-f]{2,4}\s)*)', line, re.IGNORECASE)
if m is None: continue
for hexpart in m.group(1).split():
packet_bytes.append(base64.b16decode(hexpart.upper()))
return ''.join(packet_bytes)
def Test(self, name, link_type):
"""Yields a test using this packet, as a set of lines."""
yield '// testPacket%s is the packet:' % name
for line in self.packet_lines:
yield '// ' + line
yield 'var testPacket%s = []byte{' % name
data = list(self.data)
while data:
linebytes, data = data[:16], data[16:]
yield ''.join(['\t'] + ['0x%02x, ' % ord(c) for c in linebytes])
yield '}'
yield 'func TestPacket%s(t *testing.T) {' % name
yield '\tp := gopacket.NewPacket(testPacket%s, LinkType%s, gopacket.Default)' % (name, link_type)
yield '\tif p.ErrorLayer() != nil {'
yield '\t\tt.Error("Failed to decode packet:", p.ErrorLayer().Error())'
yield '\t}'
yield '\tcheckLayers(p, []gopacket.LayerType{LayerType%s, FILL_ME_IN_WITH_ACTUAL_LAYERS}, t)' % link_type
yield '}'
yield 'func BenchmarkDecodePacket%s(b *testing.B) {' % name
yield '\tfor i := 0; i < b.N; i++ {'
yield '\t\tgopacket.NewPacket(testPacket%s, LinkType%s, gopacket.NoCopy)' % (name, link_type)
yield '\t}'
yield '}'
def GetTcpdumpOutput(filename):
"""Runs tcpdump on the given file, returning output as string."""
return subprocess.check_output(
['tcpdump', '-XX', '-s', '0', '-n', '-r', filename])
def TcpdumpOutputToPackets(output):
"""Reads a pcap file with TCPDump, yielding Packet objects."""
pdata = []
for line in output.splitlines():
if line[0] not in string.whitespace and pdata:
yield Packet(pdata)
pdata = []
pdata.append(line)
if pdata:
yield Packet(pdata)
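# Minimal usage sketch (illustrative; 'capture.pcap' is a placeholder path, not
# something the original script defines):
def _print_tests_for_file(path='capture.pcap', link_type='Ethernet'):
    """Print gopacket test stubs for every packet tcpdump finds in `path`."""
    for i, packet in enumerate(TcpdumpOutputToPackets(GetTcpdumpOutput(path))):
        print '\n'.join(packet.Test('Example%d' % i, link_type))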
def main():
class CustomHelpFormatter(argparse.ArgumentDefaultsHelpFormatter):
def _format_usage(self, usage, actions, groups, prefix=None):
header = ('TestCreator creates gopacket tests using a pcap file.\n\n'
'Tests are written to standard out... they can then be \n'
'copied into the file of your choice and modified as \n'
'you see fit.\n\n')
return header + argparse.ArgumentDefaultsHelpFormatter._format_usage(
self, usage, actions, groups, prefix)
parser = argparse.ArgumentParser(formatter_class=CustomHelpFormatter)
parser.add_argument('--link_type', default='Ethernet', help='the link type (default: %(default)s)')
parser.add_argument('--name', default='Packet%d', help='the test name template, must have "%d" inside it')
parser.add_argument('files', metavar='file.pcap', type=str, nargs='+', help='the files to process')
args = parser.parse_args()
for arg in args.files:
for path in glob.glob(arg):
for i, packet in enumerate(TcpdumpOutputToPackets(GetTcpdumpOutput(path))):
print '\n'.join(packet.Test(
args.name % i, args.link_type))
if __name__ == '__main__':
main()
|
bsd-3-clause
|
armersong/zato
|
code/zato-common/src/zato/common/markov_passwords.py
|
6
|
11748
|
# coding: utf8
"""
Use Markov chains to generate random text that sounds Japanese.
This makes random pronounceable passwords that are both strong and easy
to memorize.
Of course English or any other language could be used in the sample text.
See more details at http://exyr.org/2011/random-pronounceable-passwords/
Author: Simon Sapin
License: BSD
Slightly adapted for Zato by adding more languages in addition to Japanese
and splitting the resulting string into dash-separated groups.
"""
from __future__ import division
import string
import itertools
import random
from collections import defaultdict
# Zato
from zato.common.util import grouper
# This is a romanization of the opening of "Genji Monogatari"
# by Murasaki Shikibu.
# Source: http://etext.lib.virginia.edu/japanese/genji/roman.html
japanese = '''
Idure no ohom-toki ni ka, nyougo, kaui amata saburahi tamahi keru naka ni,
ito yamgotonaki kiha ni ha ara nu ga, sugurete tokimeki tamahu ari keri.
Hazime yori ware ha to omohi agari tamahe ru ohom-kata-gata, mezamasiki mono ni
otosime sonemi tamahu. Onazi hodo, sore yori gerahu no kaui-tati ha, masite
yasukara zu. Asa-yuhu no miya-dukahe ni tuke te mo, hito no kokoro wo nomi
ugokasi, urami wo ohu tumori ni ya ari kem, ito atusiku nari yuki, mono kokoro-
boso-ge ni sato-gati naru wo, iyo-iyo aka zu ahare naru mono ni omohosi te hito
no sosiri wo mo e habakara se tamaha zu, yo no tamesi ni mo nari nu beki ohom-
motenasi nari.
Kamdatime, uhe-bito nado mo, ainaku me wo sobame tutu, "Ito mabayuki hito no
ohom-oboye nari. Morokosi ni mo, kakaru koto no okori ni koso, yo mo midare,
asikari kere" to, yau-yau amenosita ni mo adikinau, hito no mote-nayami-gusa ni
nari te, Yauki-hi no tamesi mo hiki ide tu beku nariyuku ni, ito hasitanaki koto
ohokare do, katazikenaki mi-kokoro-bahe no taguhi naki wo tanomi ni te mazirahi
tamahu.
TiTi no Dainagon ha nakunari te haha Kita-no-kata nam inisihe no yosi aru ni te,
oya uti-gusi, sasi-atari te yo no oboye hanayaka naru ohom-kata-gata ni mo itau
otora zu, nani-goto no gisiki wo mo motenasi tamahi kere do, tori-tate te haka-
bakasiki usiro-mi si nakere ba, koto aru toki ha, naho yori-dokoro naku kokoro-
boso-ge nari.
Saki no yo ni mo ohom-tigiri ya hukakari kem, yo ni naku kiyora naru tama no
wonoko miko sahe umare tamahi nu. Itusika to kokoro-motonagara se tamahi te,
isogi mawirase te go-ran-zuru ni, meduraka naru tigo no ohom-katati nari.
Iti-no-Miko ha, Udaizin no Nyougo no ohom-hara ni te, yose omoku, utagahi naki
Mauke-no-kimi to, yo ni mote-kasiduki kikoyure do, kono ohom-nihohi ni ha narabi
tamahu beku mo ara zari kere ba, ohokata no yamgotonaki ohom-omohi ni te, kono
Kimi wo ba, watakusi-mono ni omohosi kasiduki tamahu koto kagiri nasi.
Hazime yori osinabete no uhe-miya-dukahe si tamahu beki kiha ni ha ara zari ki.
Oboye ito yamgotonaku, zyauzu-mekasi kere do, warinaku matuhasa se tamahu amari
ni, sarubeki ohom-asobi no wori-wori, nani-goto ni mo yuwe aru koto no husi-busi
ni ha, madu mau-nobora se tamahu. Aru-toki ni ha ohotono-gomori sugusi te,
yagate saburahase tamahi nado, anagati ni o-mahe sara zu mote-nasa se tamahi si
hodo ni, onodukara karoki kata ni mo miye si wo, kono Miko umare tamahi te noti
ha, ito kokoro koto ni omohosi oki te tare ba, Bau ni mo, you se zu ha, kono
Miko no wi tamahu beki na'meri to, Ichi-no-Miko no Nyougo ha obosi utagahe ri.
Hito yori saki ni mawiri tamahi te, yamgotonaki ohom-omohi nabete nara zu, Miko-
tati nado mo ohasimase ba, kono Ohom-kata no ohom-isame wo nomi zo, naho
wadurahasiu kokoro-gurusiu omohi kikoye sase tamahi keru.
Kasikoki mi-kage wo ba tanomi kikoye nagara, otosime kizu wo motome tamahu hito
ha ohoku, waga mi ha ka-yowaku mono-hakanaki arisama ni te, naka-naka naru mono-
omohi wo zo si tamahu. Mi-tubone ha Kiritubo nari. Amata no ohom-Kata-gata wo
sugi sase tamahi te, hima naki o-mahe-watari ni, hito no mi-kokoro wo tukusi
tamahu mo, geni kotowari to miye tari. Mau-nobori tamahu ni mo, amari uti-sikiru
wori-wori ha, uti-hasi, wata-dono no koko kasiko no miti ni, ayasiki waza wo si
tutu, ohom-okuri mukahe no hito no kinu no suso, tahe gataku, masanaki koto mo
ari. Mata aru toki ni ha, e sara nu me-dau no to wo sasi-kome, konata kanata
kokoro wo ahase te, hasitaname wadurahase tamahu toki mo ohokari. Koto ni hure
te kazu sira zu kurusiki koto nomi masare ba, ito itau omohi wabi taru wo, itodo
ahare to go-ran-zi te, Kourau-den ni motoyori saburahi tamahu Kaui no zausi wo
hoka ni utusa se tamahi te, Uhe-tubone ni tamaha su. Sono urami masite yara m
kata nasi.
'''
# http://www.gutenberg.org/files/17544/17544-0.txt
occitan = """
Despen lou ben per compas é mesure,
E mesquemés lou que tas amassat:
Que puch aprés si lou tas despensat,
Den gaigna mes aquo ba a labenture.
XVI.
Tant quom te sab force argent en la bousse
De toutis es Moussur é coumpaignon
Quan nou nas més delechat és deu mon,
Coum si jamés connegut nou t’augousse.
XVII.
Dits la paraule aprés lauë pensade
A gens segrets que namen pas lou brut:
Atau ne ba deu perpaus quas tengut,
Coume deu bent ou duë peire getade.
XVIII.
Si hés plase helou de boun couratge,
Sapies aqui, gouerdet de tempacha,
Que nou te caille à la fin reproucha,
Qu’eu regast pert, é l’amic, é lou gatge.
XIX.
Si bos auë peus bounis locs l’entrade,
Saget de hé coume beses que hen,
Nou sies fachous, ny broutous, ni bilén,
Ni lampourné, coume bere mainade.
XX.
Si ta bertut force de ben s’amasse,
Parens caitious bergoine nou te hén:
Qu’et beau mesleu que lou darré bilen,
Este prumé gentilhomme en sa race.
XXI.
A tribailla hé tout se que tu pousques
Esburbe-te per tout de la doun és:
Praube mestié que i d’vn truque taulés,
D’vn pan derdut, enjourrit, bade mousques.
XXII.
Nou hiques pas en ta grane coulére
Que tu madich nout pousques matiga:
Aquet que sab soun bici castiga,
Per dessus touts lo u plus sage s’apere
XXIII.
Quan as lou temps de poude hé la cause,
Nac boutes pas à tantos ou douman,
Qui per vn cop d’agine de la man,
N’abigne plus ni lou temps, n’i la pause.
XXIV.
Nou t’anes pas cargua de fantasies,
Mes tot gaujous agerge tous quehés:
Nou darés ourdes à cinq targes dahés:
A mil escuts de tas malencounies.
XXV.
Aule escourga sadits om hé la couë
De loungs ahés, charges, é coussoulats:
La populasse aporte tant de caps,
Que mes escoute, oun mes om l’arrasouë.
XXVI.
Las sages nou disen pas en bades,
Hemne que bo tant de joies pourta,
Si sous mouiens nac poden supourta,
Ou be hé mau, ou he pourte las brages.
XXVII.
Tut troũpes plan mes souuen se t’uesperes
De toun parent, ben, ou coumoditat,
Tu sabes trop sac as esprimentat,
Que males son de ton hust las esteres,
XXVIII.
Nou hasses mau d’aqueste, ou daute sorte,
Pensan qu’aprés degun nac sabera,
Tu nou pouïres tant lou houec capera,
Q’a la perfin la humade nou sorte.
XXIX.
Si toun prouheit d’entreprene t’assajes,
Nou creignes pas aquets que nan despieit,
Ni l’embejous, que name toun prouheit,
Que pan é bin, sapere tu ten ajes.
XXX.
Nou sies daquets qu’espousaran vë More,
Vn arrebrec, mes qu’age force argent.
Si nas mouillé de quauque boune gent;
L’argent s’en ba, é la bestio demoro.
XXXI.
D’ome trichot, jogue tout, encoublaire,
Nou hasses pas amic, é coumpaignoun,
Puch quet nou biu que de troũpa lou moun,
De t’aguerri, nou s’endare pas gouaire.
XXXII.
Si nou las heit, nous bantes de l’oubratje,
Ni d’autru ben nou prenges la banson,
Ou descridat seras per tout lou moun,
Lairon d’aunou, que nes pas petit gatje.
"""
breton = """
.NHO
40 MEULEUDI SANTEZ ANNA
.NTO
Va Breudeur ker, ne laeromp ket loden an Aotrou
Doue. Hen dreist pep tra, eo hen deuz great ar Verc'hez
Sakr ar pez ma zeo. Eva, ar genta maouez, a gollas ar
bed : Eur verc'h da Eva eo a dle hen savetei. Ha Doue
a bell, a bourchassas, a aozas pep tra evit kas da benn
ar pez hen doa rezolvet dre druez ouzomp.
Mez ma reaz evit ober euz ar Verc'hez eur grouadurez
ker pur, burzud var burzud, Santez Anna a reas he lod.
1° Sellit outhi : He merc'h c'hoas iaouankik flamm,
a zo en he c'hichen o teski lezen Doue.
2° D'an eil Santez Anna n'he doa krouadur ebet
nemet-hi. Ha koulskoude, d'an oad a dri bloas, Mari a
zo kaset d'an templ ha roet da Zoue.
Tadou ha mammou, setu aze ho skouer. Peurvuia
eur c'hrouadur a vez ar pez ma vez great : nebeut a
drec'h wen. En em glemm a rear euz ar vugale ; guel-
loc'h e ve en em glemm euz ar re nebeut a zoursi a vez
bet kemeret d'ho c'helen ervad.
Eürus an nep er bed-ma, a ra evit Doue, evit he
nesa, evithan he unan, ar pez a c'houlenn mad an ene.
Koulskoude, hag e tiguesfe ganheomp, kueza er
pec'het, ne gollomp ket a fizians.
Eur sant hen deus bet lavaret divar benn ar Verc'hes :
omnipotentia supplex. Me 'lavaro da m'zro, Santez
Anna dre he feden a zeuio a benn euz kement e dezho
c'hoant. Ar Verc'hez a zo he merc'h, Jesus-Krist a zo
he mab bihan : Galloud he deuz eta dirag Doue. Ha
hent all, ne c'hello ket mankout a garantez evidomp
ni bugale ar beg douar ma a zo en em wlestet dezhi.
.NHO
MEULEUDI SANTEZ ANNA 41
.NTO
En em erbedomp eta outhi gant fizians. Mar d'homp
mad, hi hor jikouro da genderc'hel ; mar bevomp er
pec'het, hi a astenno d'heomp he dourn evit hor zevel
da genta ha rei d'heomp nerz da jomm en hor zao beteg
ar fin.
AMEN.
.NPO
"""
def pairwise(iterable):
"""
Yield pairs of consecutive elements in iterable.
>>> list(pairwise('abcd'))
[('a', 'b'), ('b', 'c'), ('c', 'd')]
"""
iterator = iter(iterable)
try:
a = iterator.next()
except StopIteration:
return
for b in iterator:
yield a, b
a = b
class MarkovChain(object):
"""
If a system transitions from one state to another and the next state depends
only on the current state and not on the past, it is said to be a Markov chain.
It is determined by the probability of each next state from any current
state.
See http://en.wikipedia.org/wiki/Markov_chain
The probabilities are built from the frequencies in the `sample` chain.
Elements of the sample that are not a valid state are ignored.
"""
def __init__(self, sample):
self.counts = counts = defaultdict(lambda: defaultdict(int))
for current, next in pairwise(sample):
counts[current][next] += 1
self.totals = dict(
(current, sum(next_counts.itervalues()))
for current, next_counts in counts.iteritems()
)
def next(self, state):
"""
Choose at random and return a next state from a current state,
according to the probabilities for this chain
"""
nexts = self.counts[state].iteritems()
# Like random.choice() but with a different weight for each element
rand = random.randrange(0, self.totals[state])
# Using bisection here could be faster, but simplicity prevailed.
# (Also it’s not that slow with 26 states or so.)
for next_state, weight in nexts:
if rand < weight:
return next_state
rand -= weight
def __iter__(self):
"""
Return an infinite iterator of states.
"""
state = random.choice(self.counts.keys())
while True:
state = self.next(state)
yield state
def generate_password(length=16):
chain = MarkovChain(
c for c in japanese.lower() + occitan.lower() + breton.lower() if c in string.ascii_lowercase
)
return '-'.join(''.join(elems) for elems in (grouper(4, ''.join(itertools.islice(chain, length)))))
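# Editor's note: a minimal usage sketch, not part of the original module. It
# assumes the imports this file already relies on above (random, string,
# itertools) and the itertools-recipes style `grouper` helper are defined
# earlier in the file, as the code expects.
if __name__ == '__main__':
    # Prints a few 16-letter passwords built from the letter-transition
    # statistics of the three corpora, grouped as four 4-letter chunks,
    # e.g. something shaped like "osak-enat-arou-silh".
    for _ in range(3):
        print(generate_password(16))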
|
gpl-3.0
|
Alex-Chizhov/python_training
|
home_work_4_fixture/test_add_group_and_contact.py
|
1
|
1218
|
# -*- coding: utf-8 -*-
from group import Group
from info_contact import Infos
from application import Application
import pytest
@pytest.fixture
def app(request):
fixture = Application()
request.addfinalizer(fixture.destroy)
return fixture
def test_add_group(app):
app.login( username="admin", password="secret")
app.group_creator( Group(name="123", header="123", footer="123"))
app.logout()
def test_add_contact(app):
app.login( username="admin", password="secret")
app.link_add_new()
app.fill_form_contact( Infos(firstname="qq", middelname="qq", lastname="qq", nickname="qq", title="qq", company="qq",
addres="ww", home="11", mobile="22", fax="22", homepage="wewr.ru", day_Birthday="[7]",
month_Birthday="[10]", year_Birthday="1980", day_Anniversary="[18]", month_Anniversary="[7]",
year_Anniversary="2000", address2="12", phone2="12", notes="12", work ='qwe',photo ="C:\\Users\\Alex\\Documents\\GitHub\\python_training\\home_work_3\\avatar.jpg"))
app.input_save_form()
app.logout()
def test_add_empty_group(app):
app.login( username="admin", password="secret")
app.group_creator( Group(name="", header="", footer=""))
app.logout()
|
apache-2.0
|
zaccoz/odoo
|
addons/website_mail_group/models/mail_group.py
|
321
|
2678
|
# -*- coding: utf-8 -*-
from openerp.osv import osv
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
from openerp.addons.website.models.website import slug
class MailGroup(osv.Model):
_inherit = 'mail.group'
def message_get_email_values(self, cr, uid, id, notif_mail=None, context=None):
res = super(MailGroup, self).message_get_email_values(cr, uid, id, notif_mail=notif_mail, context=context)
group = self.browse(cr, uid, id, context=context)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
headers = {}
if res.get('headers'):
try:
headers = eval(res['headers'])
except Exception:
pass
headers.update({
'List-Archive': '<%s/groups/%s>' % (base_url, slug(group)),
'List-Subscribe': '<%s/groups>' % (base_url),
'List-Unsubscribe': '<%s/groups?unsubscribe>' % (base_url,),
})
res['headers'] = repr(headers)
return res
class MailMail(osv.Model):
_inherit = 'mail.mail'
def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
""" Short-circuit parent method for mail groups, replace the default
footer with one appropriate for mailing-lists."""
if mail.model == 'mail.group' and mail.res_id:
# no super() call on purpose, no private links that could be quoted!
group = self.pool['mail.group'].browse(cr, uid, mail.res_id, context=context)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
vals = {
'maillist': _('Mailing-List'),
'post_to': _('Post to'),
'unsub': _('Unsubscribe'),
'mailto': 'mailto:%s@%s' % (group.alias_name, group.alias_domain),
'group_url': '%s/groups/%s' % (base_url, slug(group)),
'unsub_url': '%s/groups?unsubscribe' % (base_url,),
}
footer = """_______________________________________________
%(maillist)s: %(group_url)s
%(post_to)s: %(mailto)s
%(unsub)s: %(unsub_url)s
""" % vals
body = tools.append_content_to_html(mail.body, footer, container_tag='div')
return body
else:
return super(MailMail, self).send_get_mail_body(cr, uid, mail,
partner=partner,
context=context)
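# Editor's note (illustrative only, values are hypothetical): for a group with
# alias "my-list" hosted at http://example.com, the footer built above renders
# roughly as:
#
#   _______________________________________________
#   Mailing-List: http://example.com/groups/my-list-1
#   Post to: mailto:[email protected]
#   Unsubscribe: http://example.com/groups?unsubscribe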
|
agpl-3.0
|
zacharyvoase/django-postgres
|
tests/test_project/viewtest/tests.py
|
1
|
1775
|
from contextlib import closing
from django.contrib import auth
from django.core.management import call_command
from django.db import connection
from django.test import TestCase
import models
class ViewTestCase(TestCase):
def setUp(self):
call_command('sync_pgviews', *[], **{})
def test_views_have_been_created(self):
with closing(connection.cursor()) as cur:
cur.execute('''SELECT COUNT(*) FROM pg_views
WHERE viewname LIKE 'viewtest_%';''')
count, = cur.fetchone()
self.assertEqual(count, 3)
def test_wildcard_projection_gets_all_fields_from_projected_model(self):
foo_user = auth.models.User.objects.create(
username='foo', is_superuser=True)
foo_user.set_password('blah')
foo_user.save()
foo_superuser = models.Superusers.objects.get(username='foo')
self.assertEqual(foo_user.id, foo_superuser.id)
self.assertEqual(foo_user.password, foo_superuser.password)
def test_limited_projection_only_gets_selected_fields_from_projected_model(self):
foo_user = auth.models.User.objects.create(
username='foo', is_superuser=True)
foo_user.set_password('blah')
foo_user.save()
foo_simple = models.SimpleUser.objects.get(username='foo')
self.assertEqual(foo_simple.username, foo_user.username)
self.assertEqual(foo_simple.password, foo_user.password)
self.assertFalse(hasattr(foo_simple, 'date_joined'))
def test_queryset_based_view_works_similarly_to_raw_sql(self):
auth.models.User.objects.create(
username='foo', is_staff=True)
self.assertTrue(
models.Staffness.objects.filter(username='foo').exists())
|
unlicense
|
victorbergelin/scikit-learn
|
examples/neighbors/plot_approximate_nearest_neighbors_hyperparameters.py
|
227
|
5170
|
"""
=================================================
Hyper-parameters of Approximate Nearest Neighbors
=================================================
This example demonstrates how the accuracy of nearest neighbor queries with
Locality Sensitive Hashing Forest varies as the number of candidates and the
number of estimators (trees) change.
In the first plot, accuracy is plotted against the number of candidates. Here,
the term "number of candidates" refers to the maximum bound on the number of
distinct points retrieved from each tree to calculate the distances. Nearest
neighbors are selected from this pool of candidates. The number of estimators
is maintained at three fixed levels (1, 5, 10).
In the second plot, the number of candidates is fixed at 50. The number of
trees is varied and the accuracy is plotted against those values. To measure
the accuracy, the true nearest neighbors are required, so
:class:`sklearn.neighbors.NearestNeighbors` is used to compute the exact
neighbors.
"""
from __future__ import division
print(__doc__)
# Author: Maheshakya Wijewardena <[email protected]>
#
# License: BSD 3 clause
###############################################################################
import numpy as np
from sklearn.datasets.samples_generator import make_blobs
from sklearn.neighbors import LSHForest
from sklearn.neighbors import NearestNeighbors
import matplotlib.pyplot as plt
# Initialize size of the database, iterations and required neighbors.
n_samples = 10000
n_features = 100
n_queries = 30
rng = np.random.RandomState(42)
# Generate sample data
X, _ = make_blobs(n_samples=n_samples + n_queries,
n_features=n_features, centers=10,
random_state=0)
X_index = X[:n_samples]
X_query = X[n_samples:]
# Get exact neighbors
nbrs = NearestNeighbors(n_neighbors=1, algorithm='brute',
metric='cosine').fit(X_index)
neighbors_exact = nbrs.kneighbors(X_query, return_distance=False)
# Set `n_candidate` values
n_candidates_values = np.linspace(10, 500, 5).astype(np.int)
n_estimators_for_candidate_value = [1, 5, 10]
n_iter = 10
stds_accuracies = np.zeros((len(n_estimators_for_candidate_value),
n_candidates_values.shape[0]),
dtype=float)
accuracies_c = np.zeros((len(n_estimators_for_candidate_value),
n_candidates_values.shape[0]), dtype=float)
# LSH Forest is a stochastic index: perform several iterations to estimate
# the expected accuracy and the standard deviation displayed as error bars
# in the plots
for j, value in enumerate(n_estimators_for_candidate_value):
for i, n_candidates in enumerate(n_candidates_values):
accuracy_c = []
for seed in range(n_iter):
lshf = LSHForest(n_estimators=value,
n_candidates=n_candidates, n_neighbors=1,
random_state=seed)
# Build the LSH Forest index
lshf.fit(X_index)
# Get neighbors
neighbors_approx = lshf.kneighbors(X_query,
return_distance=False)
accuracy_c.append(np.sum(np.equal(neighbors_approx,
neighbors_exact)) /
n_queries)
stds_accuracies[j, i] = np.std(accuracy_c)
accuracies_c[j, i] = np.mean(accuracy_c)
# Set `n_estimators` values
n_estimators_values = [1, 5, 10, 20, 30, 40, 50]
accuracies_trees = np.zeros(len(n_estimators_values), dtype=float)
# Calculate average accuracy for each value of `n_estimators`
for i, n_estimators in enumerate(n_estimators_values):
lshf = LSHForest(n_estimators=n_estimators, n_neighbors=1)
# Build the LSH Forest index
lshf.fit(X_index)
# Get neighbors
neighbors_approx = lshf.kneighbors(X_query, return_distance=False)
accuracies_trees[i] = np.sum(np.equal(neighbors_approx,
neighbors_exact))/n_queries
###############################################################################
# Plot the accuracy variation with `n_candidates`
plt.figure()
colors = ['c', 'm', 'y']
for i, n_estimators in enumerate(n_estimators_for_candidate_value):
label = 'n_estimators = %d ' % n_estimators
plt.plot(n_candidates_values, accuracies_c[i, :],
'o-', c=colors[i], label=label)
plt.errorbar(n_candidates_values, accuracies_c[i, :],
stds_accuracies[i, :], c=colors[i])
plt.legend(loc='upper left', fontsize='small')
plt.ylim([0, 1.2])
plt.xlim(min(n_candidates_values), max(n_candidates_values))
plt.ylabel("Accuracy")
plt.xlabel("n_candidates")
plt.grid(which='both')
plt.title("Accuracy variation with n_candidates")
# Plot the accuracy variation with `n_estimators`
plt.figure()
plt.scatter(n_estimators_values, accuracies_trees, c='k')
plt.plot(n_estimators_values, accuracies_trees, c='g')
plt.ylim([0, 1.2])
plt.xlim(min(n_estimators_values), max(n_estimators_values))
plt.ylabel("Accuracy")
plt.xlabel("n_estimators")
plt.grid(which='both')
plt.title("Accuracy variation with n_estimators")
plt.show()
|
bsd-3-clause
|
openhatch/oh-mainline
|
vendor/packages/scrapy/scrapy/utils/reactor.py
|
178
|
1360
|
from twisted.internet import reactor, error
def listen_tcp(portrange, host, factory):
"""Like reactor.listenTCP but tries different ports in a range."""
assert len(portrange) <= 2, "invalid portrange: %s" % portrange
if not hasattr(portrange, '__iter__'):
return reactor.listenTCP(portrange, factory, interface=host)
if not portrange:
return reactor.listenTCP(0, factory, interface=host)
if len(portrange) == 1:
return reactor.listenTCP(portrange[0], factory, interface=host)
for x in range(portrange[0], portrange[1]+1):
try:
return reactor.listenTCP(x, factory, interface=host)
except error.CannotListenError:
if x == portrange[1]:
raise
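# Editor's note: an illustrative call, not part of the original module (the
# factory object is hypothetical):
#
#     port = listen_tcp([6023, 6073], '127.0.0.1', factory)
#
# tries ports 6023..6073 in order and returns the listening port for the
# first one that can be bound, re-raising CannotListenError only if every
# port in the range is taken.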
class CallLaterOnce(object):
"""Schedule a function to be called in the next reactor loop, but only if
it hasn't already been scheduled since the last time it ran.
"""
def __init__(self, func, *a, **kw):
self._func = func
self._a = a
self._kw = kw
self._call = None
def schedule(self, delay=0):
if self._call is None:
self._call = reactor.callLater(delay, self)
def cancel(self):
if self._call:
self._call.cancel()
def __call__(self):
self._call = None
return self._func(*self._a, **self._kw)
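# Editor's note: a usage sketch, not part of the original module; the names
# below (process_queue, queue, items) are hypothetical. schedule() may be
# called any number of times per reactor iteration, but the wrapped function
# runs at most once per loop:
#
#     flush = CallLaterOnce(process_queue)
#     for item in items:
#         queue.append(item)
#         flush.schedule()   # coalesced into a single process_queue() call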
|
agpl-3.0
|
anilmuthineni/tensorflow
|
tensorflow/python/framework/tensor_shape_test.py
|
79
|
17267
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for shape inference helper classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import tensor_shape_pb2
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class DimensionTest(test_util.TensorFlowTestCase):
def testDimension(self):
dim = tensor_shape.Dimension(12)
self.assertEqual(12, dim.value)
self.assertEqual(12, int(dim))
self.assertEqual(dim, tensor_shape.Dimension(12))
self.assertEqual(tensor_shape.Dimension(15),
dim + tensor_shape.Dimension(3))
self.assertEqual(tensor_shape.Dimension(15), dim + 3)
self.assertEqual(tensor_shape.Dimension(24),
dim * tensor_shape.Dimension(2))
self.assertEqual(tensor_shape.Dimension(24), dim * 2)
self.assertEqual(
tensor_shape.Dimension(6), dim // tensor_shape.Dimension(2))
self.assertEqual(tensor_shape.Dimension(6), dim // 2)
self.assertEqual(tensor_shape.Dimension(12),
dim.merge_with(tensor_shape.Dimension(12)))
self.assertEqual(tensor_shape.Dimension(12), dim.merge_with(12))
self.assertLess(tensor_shape.Dimension(12), tensor_shape.Dimension(13))
self.assertGreater(tensor_shape.Dimension(13), tensor_shape.Dimension(12))
self.assertLessEqual(tensor_shape.Dimension(12), tensor_shape.Dimension(12))
self.assertLessEqual(tensor_shape.Dimension(12), tensor_shape.Dimension(13))
self.assertGreater(tensor_shape.Dimension(13), tensor_shape.Dimension(12))
self.assertGreaterEqual(tensor_shape.Dimension(12),
tensor_shape.Dimension(12))
self.assertGreaterEqual(tensor_shape.Dimension(13),
tensor_shape.Dimension(12))
self.assertNotEqual(dim, (12,))
with self.assertRaises(ValueError):
dim.merge_with(tensor_shape.Dimension(13))
def testUnknownDimension(self):
dim = tensor_shape.Dimension(None)
self.assertIs(None, dim.value)
self.assertEqual(dim.value, tensor_shape.Dimension(None).value)
self.assertEqual(tensor_shape.Dimension(None).value,
(dim + tensor_shape.Dimension(None)).value)
self.assertEqual(tensor_shape.Dimension(None).value,
(dim * tensor_shape.Dimension(None)).value)
self.assertEqual(
tensor_shape.Dimension(None).value,
(dim // tensor_shape.Dimension(None)).value)
self.assertEqual(tensor_shape.Dimension(None).value,
dim.merge_with(tensor_shape.Dimension(None)).value)
self.assertIs(None,
tensor_shape.Dimension(None) < tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) <= tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) > tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) >= tensor_shape.Dimension(None))
def testKnownAndUnknownDimensions(self):
known = tensor_shape.Dimension(12)
unknown = tensor_shape.Dimension(None)
self.assertEqual(
tensor_shape.Dimension(None).value, (known + unknown).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (unknown + known).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (known * unknown).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (unknown * known).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (known // unknown).value)
self.assertEqual(
tensor_shape.Dimension(None).value, (unknown // known).value)
self.assertEqual(
tensor_shape.Dimension(12), known.merge_with(unknown))
self.assertEqual(
tensor_shape.Dimension(12), unknown.merge_with(known))
self.assertIs(None,
tensor_shape.Dimension(12) < tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(12) <= tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(12) > tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(12) >= tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) < tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) <= tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) > tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) >= tensor_shape.Dimension(12))
def testAsDimension(self):
self.assertEqual(tensor_shape.Dimension(12),
tensor_shape.as_dimension(tensor_shape.Dimension(12)))
self.assertEqual(tensor_shape.Dimension(12), tensor_shape.as_dimension(12))
self.assertEqual(
tensor_shape.Dimension(None).value,
tensor_shape.as_dimension(tensor_shape.Dimension(None)).value)
self.assertEqual(tensor_shape.Dimension(None).value,
tensor_shape.as_dimension(None).value)
def testEquality(self):
self.assertTrue(tensor_shape.Dimension(12) == tensor_shape.Dimension(12))
self.assertFalse(tensor_shape.Dimension(12) == tensor_shape.Dimension(13))
self.assertIs(None,
tensor_shape.Dimension(12) == tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) == tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) == tensor_shape.Dimension(None))
self.assertTrue(tensor_shape.Dimension(12) == "12")
self.assertTrue(tensor_shape.Dimension(12) == 24.0 / 2)
# None indicates ambiguous comparison, but comparison vs the wrong type
# is unambiguously False.
self.assertIsNotNone(tensor_shape.Dimension(12) == "_")
self.assertIsNotNone(tensor_shape.Dimension(None) == 12.99)
self.assertFalse(tensor_shape.Dimension(12) == "_")
self.assertFalse(tensor_shape.Dimension(None) == 12.99)
self.assertIs(None, tensor_shape.Dimension(None) == "13")
self.assertIs(None, tensor_shape.Dimension(None) == None) # pylint: disable=g-equals-none
self.assertFalse(tensor_shape.Dimension(12) == 12.99)
def testInequality(self):
self.assertTrue(tensor_shape.Dimension(12) != tensor_shape.Dimension(13))
self.assertFalse(tensor_shape.Dimension(12) != tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(12) != tensor_shape.Dimension(None))
self.assertIs(None,
tensor_shape.Dimension(None) != tensor_shape.Dimension(12))
self.assertIs(None,
tensor_shape.Dimension(None) != tensor_shape.Dimension(None))
# None indicates ambiguous comparison, but comparison vs the wrong type
# is unambiguously False.
self.assertIsNotNone(tensor_shape.Dimension(12) != "_")
self.assertIsNotNone(tensor_shape.Dimension(None) != 12.99)
self.assertTrue(tensor_shape.Dimension(12) != "_")
self.assertTrue(tensor_shape.Dimension(None) != 12.99)
self.assertIs(None, tensor_shape.Dimension(None) != "13")
self.assertIs(None, tensor_shape.Dimension(None) != None) # pylint: disable=g-equals-none
self.assertTrue(tensor_shape.Dimension(12) != 12.99)
def testRepr(self):
self.assertEqual(repr(tensor_shape.Dimension(7)), "Dimension(7)")
self.assertEqual(repr(tensor_shape.Dimension(None)), "Dimension(None)")
def testStr(self):
self.assertEqual(str(tensor_shape.Dimension(7)), "7")
self.assertEqual(str(tensor_shape.Dimension(None)), "?")
class ShapeTest(test_util.TensorFlowTestCase):
def testUnknownShape(self):
s = tensor_shape.TensorShape(None)
with self.assertRaises(ValueError):
s.assert_is_fully_defined()
self.assertIs(None, s.ndims)
with self.assertRaises(ValueError):
len(s)
self.assertFalse(s)
self.assertIs(None, s.dims)
with self.assertRaises(ValueError):
for _ in tensor_shape.TensorShape(None):
pass
def testFullyDefinedShape(self):
s = tensor_shape.TensorShape([tensor_shape.Dimension(
3), tensor_shape.Dimension(4), tensor_shape.Dimension(7)])
s.assert_is_fully_defined()
self.assertEqual(3, s.ndims)
self.assertEqual(3, len(s))
self.assertTrue(s)
s.assert_has_rank(3)
self.assertEqual([tensor_shape.Dimension(3),
tensor_shape.Dimension(4),
tensor_shape.Dimension(7)], s.dims)
self.assertEqual(tensor_shape.Dimension(3), s[0])
self.assertEqual(tensor_shape.Dimension(4), s[1])
self.assertEqual(tensor_shape.Dimension(7), s[2])
self.assertEqual([3, 4, 7], s.as_list())
s.assert_is_compatible_with([3, 4, 7])
s.assert_same_rank([6, 3, 7])
for d1, d2 in zip(s, [3, 4, 7]):
assert d1.value == d2
def testPartiallyDefinedShape(self):
s = tensor_shape.TensorShape([tensor_shape.Dimension(
3), tensor_shape.Dimension(None), tensor_shape.Dimension(7)])
with self.assertRaises(ValueError):
s.assert_is_fully_defined()
self.assertEqual(3, s.ndims)
self.assertEqual(3, len(s))
self.assertTrue(s)
s.assert_has_rank(3)
self.assertEqual(tensor_shape.Dimension(3), s[0])
self.assertEqual(tensor_shape.Dimension(None).value, s[1].value)
self.assertEqual(tensor_shape.Dimension(7), s[2])
s.assert_same_rank([6, 3, 7])
for d1, d2 in zip(s, [3, None, 7]):
assert d1.value == d2
def testMergeFullShapes(self):
self.assertEqual([3, 4, 7],
tensor_shape.TensorShape([3, 4, 7]).merge_with(
tensor_shape.TensorShape([3, 4, 7])).as_list())
with self.assertRaises(ValueError):
tensor_shape.TensorShape([3, 4, 7]).merge_with(
tensor_shape.TensorShape([6, 3, 7]))
def testMergePartialShapes(self):
s1 = tensor_shape.TensorShape([tensor_shape.Dimension(
3), tensor_shape.Dimension(None), tensor_shape.Dimension(7)])
s2 = tensor_shape.TensorShape([tensor_shape.Dimension(
None), tensor_shape.Dimension(4), tensor_shape.Dimension(7)])
self.assertEqual([3, 4, 7], s1.merge_with(s2).as_list())
def testMergeFullAndUnknownShape(self):
self.assertEqual([3, 4, 7],
tensor_shape.TensorShape([3, 4, 7]).merge_with(
tensor_shape.TensorShape(None)).as_list())
def testSlice(self):
known = tensor_shape.TensorShape([0, 1, 2, 3, 4])
self.assertEqual(tensor_shape.Dimension(2), known[2])
tensor_shape.TensorShape([1, 2, 3]).assert_is_compatible_with(known[1:4])
unknown = tensor_shape.TensorShape(None)
self.assertEqual(tensor_shape.Dimension(None).value, unknown[2].value)
tensor_shape.TensorShape(
[None, None, None]).assert_is_compatible_with(unknown[1:4])
def testConcatenate(self):
tensor_shape.TensorShape([1, 2, 3, 4]).assert_is_compatible_with(
tensor_shape.TensorShape([1, 2]).concatenate(
tensor_shape.TensorShape([3, 4])))
tensor_shape.TensorShape([1, 2, 3, 4]).assert_is_compatible_with(
tensor_shape.TensorShape([1, 2]).concatenate(
tensor_shape.TensorShape(None)))
tensor_shape.TensorShape([1, 2, 3, 4]).assert_is_compatible_with(
tensor_shape.TensorShape(None).concatenate(
tensor_shape.TensorShape([3, 4])))
tensor_shape.TensorShape([1, 2, 3, 4]).assert_is_compatible_with(
tensor_shape.TensorShape(None).concatenate(
tensor_shape.TensorShape(None)))
tensor_shape.TensorShape([1, 2, 3]).assert_is_compatible_with(
tensor_shape.TensorShape([1, 2]).concatenate(
tensor_shape.Dimension(3)))
def testHelpers(self):
tensor_shape.TensorShape([]).assert_is_compatible_with(
tensor_shape.scalar())
tensor_shape.TensorShape([37]).assert_is_compatible_with(
tensor_shape.vector(37))
tensor_shape.TensorShape(
[94, 43]).assert_is_compatible_with(tensor_shape.matrix(94, 43))
def testTruedivFails(self):
unknown = tensor_shape.Dimension(None)
self.assertEqual((unknown // unknown).value, None)
with self.assertRaisesRegexp(TypeError, r"unsupported operand type"):
unknown / unknown # pylint: disable=pointless-statement
def testConvertFromProto(self):
def make_tensor_shape_proto(shape):
return tensor_shape_pb2.TensorShapeProto(
dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=x) for x in shape])
proto = make_tensor_shape_proto([])
self.assertEqual(tensor_shape.TensorShape([]),
tensor_shape.TensorShape(proto))
self.assertEqual(tensor_shape.TensorShape([]),
tensor_shape.as_shape(proto))
proto = make_tensor_shape_proto([1, 37, 42])
self.assertEqual(tensor_shape.TensorShape([1, 37, 42]),
tensor_shape.TensorShape(proto))
self.assertEqual(tensor_shape.TensorShape([1, 37, 42]),
tensor_shape.as_shape(proto))
partial_proto_shape = tensor_shape.as_shape(
make_tensor_shape_proto([-1, 37, 42]))
partial_shape = tensor_shape.TensorShape([None, 37, 42])
self.assertNotEqual(partial_proto_shape, partial_shape)
self.assertEqual(partial_proto_shape[0].value, None)
self.assertEqual(partial_proto_shape[1].value, 37)
self.assertEqual(partial_proto_shape[2].value, 42)
self.assertTrue(partial_shape.is_compatible_with(partial_proto_shape))
def testStr(self):
self.assertEqual("<unknown>", str(tensor_shape.unknown_shape()))
self.assertEqual("(?,)", str(tensor_shape.unknown_shape(ndims=1)))
self.assertEqual("(?, ?)", str(tensor_shape.unknown_shape(ndims=2)))
self.assertEqual("(?, ?, ?)", str(tensor_shape.unknown_shape(ndims=3)))
self.assertEqual("()", str(tensor_shape.scalar()))
self.assertEqual("(7,)", str(tensor_shape.vector(7)))
self.assertEqual("(3, 8)", str(tensor_shape.matrix(3, 8)))
self.assertEqual("(4, 5, 2)", str(tensor_shape.TensorShape([4, 5, 2])))
self.assertEqual("(32, ?, 1, 9)",
str(tensor_shape.TensorShape([32, None, 1, 9])))
def testAsProto(self):
self.assertTrue(tensor_shape.unknown_shape().as_proto().unknown_rank)
self.assertFalse(
tensor_shape.unknown_shape(ndims=3).as_proto().unknown_rank)
self.assertFalse(
tensor_shape.TensorShape([1, 2, 3]).as_proto().unknown_rank)
self.assertFalse(
tensor_shape.TensorShape([1, None, 3]).as_proto().unknown_rank)
def testEquality(self):
s1 = tensor_shape.TensorShape([tensor_shape.Dimension(
3), tensor_shape.Dimension(4), tensor_shape.Dimension(7)])
s2 = tensor_shape.TensorShape([tensor_shape.Dimension(
3), tensor_shape.Dimension(4), tensor_shape.Dimension(7)])
s3 = tensor_shape.TensorShape([tensor_shape.Dimension(3),
tensor_shape.Dimension(4), None])
self.assertTrue(s1 == s2)
self.assertFalse(s1 != s2)
self.assertFalse(s1 == "a string")
self.assertTrue(s1 != "a string")
self.assertNotEqual(s1, "347", "Should not equal an ambiguous string.")
self.assertEqual(s1, ["3", "4", "7"])
# Test with an unknown shape in s3
self.assertTrue(s1 != s3)
self.assertFalse(s3 == "a string")
self.assertTrue(s3 != "a string")
# eq and neq are not symmetric for unknown shapes.
unk0 = tensor_shape.unknown_shape()
self.assertFalse(unk0 == s1)
self.assertFalse(s1 == unk0)
with self.assertRaises(ValueError):
unk0 != s1 # pylint: disable=pointless-statement
with self.assertRaises(ValueError):
s1 != unk0 # pylint: disable=pointless-statement
unk1 = tensor_shape.unknown_shape()
self.assertTrue(unk0 == unk1)
self.assertTrue(unk1 == unk0)
with self.assertRaises(ValueError):
unk0 != unk1 # pylint: disable=pointless-statement
with self.assertRaises(ValueError):
unk1 != unk0 # pylint: disable=pointless-statement
def testAsList(self):
with self.assertRaisesRegexp(ValueError,
"not defined on an unknown TensorShape"):
tensor_shape.unknown_shape().as_list()
self.assertAllEqual([None, None], tensor_shape.unknown_shape(2).as_list())
self.assertAllEqual([2, None, 4], tensor_shape.TensorShape(
(2, None, 4)).as_list())
if __name__ == "__main__":
googletest.main()
|
apache-2.0
|
oktayacikalin/pyglet
|
experimental/input/linux_const.py
|
28
|
8183
|
#!/usr/bin/env python
'''Event constants from /usr/include/linux/input.h
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
EV_SYN = 0x00
EV_KEY = 0x01
EV_REL = 0x02
EV_ABS = 0x03
EV_MSC = 0x04
EV_LED = 0x11
EV_SND = 0x12
EV_REP = 0x14
EV_FF = 0x15
EV_PWR = 0x16
EV_FF_STATUS = 0x17
EV_MAX = 0x1f
# Synchronization events.
SYN_REPORT = 0
SYN_CONFIG = 1
# Keys and buttons
KEY_RESERVED = 0
KEY_ESC = 1
KEY_1 = 2
KEY_2 = 3
KEY_3 = 4
KEY_4 = 5
KEY_5 = 6
KEY_6 = 7
KEY_7 = 8
KEY_8 = 9
KEY_9 = 10
KEY_0 = 11
KEY_MINUS = 12
KEY_EQUAL = 13
KEY_BACKSPACE = 14
KEY_TAB = 15
KEY_Q = 16
KEY_W = 17
KEY_E = 18
KEY_R = 19
KEY_T = 20
KEY_Y = 21
KEY_U = 22
KEY_I = 23
KEY_O = 24
KEY_P = 25
KEY_LEFTBRACE = 26
KEY_RIGHTBRACE = 27
KEY_ENTER = 28
KEY_LEFTCTRL = 29
KEY_A = 30
KEY_S = 31
KEY_D = 32
KEY_F = 33
KEY_G = 34
KEY_H = 35
KEY_J = 36
KEY_K = 37
KEY_L = 38
KEY_SEMICOLON = 39
KEY_APOSTROPHE = 40
KEY_GRAVE = 41
KEY_LEFTSHIFT = 42
KEY_BACKSLASH = 43
KEY_Z = 44
KEY_X = 45
KEY_C = 46
KEY_V = 47
KEY_B = 48
KEY_N = 49
KEY_M = 50
KEY_COMMA = 51
KEY_DOT = 52
KEY_SLASH = 53
KEY_RIGHTSHIFT = 54
KEY_KPASTERISK = 55
KEY_LEFTALT = 56
KEY_SPACE = 57
KEY_CAPSLOCK = 58
KEY_F1 = 59
KEY_F2 = 60
KEY_F3 = 61
KEY_F4 = 62
KEY_F5 = 63
KEY_F6 = 64
KEY_F7 = 65
KEY_F8 = 66
KEY_F9 = 67
KEY_F10 = 68
KEY_NUMLOCK = 69
KEY_SCROLLLOCK = 70
KEY_KP7 = 71
KEY_KP8 = 72
KEY_KP9 = 73
KEY_KPMINUS = 74
KEY_KP4 = 75
KEY_KP5 = 76
KEY_KP6 = 77
KEY_KPPLUS = 78
KEY_KP1 = 79
KEY_KP2 = 80
KEY_KP3 = 81
KEY_KP0 = 82
KEY_KPDOT = 83
KEY_ZENKAKUHANKAKU = 85
KEY_102ND = 86
KEY_F11 = 87
KEY_F12 = 88
KEY_RO = 89
KEY_KATAKANA = 90
KEY_HIRAGANA = 91
KEY_HENKAN = 92
KEY_KATAKANAHIRAGANA = 93
KEY_MUHENKAN = 94
KEY_KPJPCOMMA = 95
KEY_KPENTER = 96
KEY_RIGHTCTRL = 97
KEY_KPSLASH = 98
KEY_SYSRQ = 99
KEY_RIGHTALT = 100
KEY_LINEFEED = 101
KEY_HOME = 102
KEY_UP = 103
KEY_PAGEUP = 104
KEY_LEFT = 105
KEY_RIGHT = 106
KEY_END = 107
KEY_DOWN = 108
KEY_PAGEDOWN = 109
KEY_INSERT = 110
KEY_DELETE = 111
KEY_MACRO = 112
KEY_MUTE = 113
KEY_VOLUMEDOWN = 114
KEY_VOLUMEUP = 115
KEY_POWER = 116
KEY_KPEQUAL = 117
KEY_KPPLUSMINUS = 118
KEY_PAUSE = 119
KEY_KPCOMMA = 121
KEY_HANGUEL = 122
KEY_HANJA = 123
KEY_YEN = 124
KEY_LEFTMETA = 125
KEY_RIGHTMETA = 126
KEY_COMPOSE = 127
KEY_STOP = 128
KEY_AGAIN = 129
KEY_PROPS = 130
KEY_UNDO = 131
KEY_FRONT = 132
KEY_COPY = 133
KEY_OPEN = 134
KEY_PASTE = 135
KEY_FIND = 136
KEY_CUT = 137
KEY_HELP = 138
KEY_MENU = 139
KEY_CALC = 140
KEY_SETUP = 141
KEY_SLEEP = 142
KEY_WAKEUP = 143
KEY_FILE = 144
KEY_SENDFILE = 145
KEY_DELETEFILE = 146
KEY_XFER = 147
KEY_PROG1 = 148
KEY_PROG2 = 149
KEY_WWW = 150
KEY_MSDOS = 151
KEY_COFFEE = 152
KEY_DIRECTION = 153
KEY_CYCLEWINDOWS = 154
KEY_MAIL = 155
KEY_BOOKMARKS = 156
KEY_COMPUTER = 157
KEY_BACK = 158
KEY_FORWARD = 159
KEY_CLOSECD = 160
KEY_EJECTCD = 161
KEY_EJECTCLOSECD = 162
KEY_NEXTSONG = 163
KEY_PLAYPAUSE = 164
KEY_PREVIOUSSONG = 165
KEY_STOPCD = 166
KEY_RECORD = 167
KEY_REWIND = 168
KEY_PHONE = 169
KEY_ISO = 170
KEY_CONFIG = 171
KEY_HOMEPAGE = 172
KEY_REFRESH = 173
KEY_EXIT = 174
KEY_MOVE = 175
KEY_EDIT = 176
KEY_SCROLLUP = 177
KEY_SCROLLDOWN = 178
KEY_KPLEFTPAREN = 179
KEY_KPRIGHTPAREN = 180
KEY_F13 = 183
KEY_F14 = 184
KEY_F15 = 185
KEY_F16 = 186
KEY_F17 = 187
KEY_F18 = 188
KEY_F19 = 189
KEY_F20 = 190
KEY_F21 = 191
KEY_F22 = 192
KEY_F23 = 193
KEY_F24 = 194
KEY_PLAYCD = 200
KEY_PAUSECD = 201
KEY_PROG3 = 202
KEY_PROG4 = 203
KEY_SUSPEND = 205
KEY_CLOSE = 206
KEY_PLAY = 207
KEY_FASTFORWARD = 208
KEY_BASSBOOST = 209
KEY_PRINT = 210
KEY_HP = 211
KEY_CAMERA = 212
KEY_SOUND = 213
KEY_QUESTION = 214
KEY_EMAIL = 215
KEY_CHAT = 216
KEY_SEARCH = 217
KEY_CONNECT = 218
KEY_FINANCE = 219
KEY_SPORT = 220
KEY_SHOP = 221
KEY_ALTERASE = 222
KEY_CANCEL = 223
KEY_BRIGHTNESSDOWN = 224
KEY_BRIGHTNESSUP = 225
KEY_MEDIA = 226
KEY_UNKNOWN = 240
BTN_MISC = 0x100
BTN_0 = 0x100
BTN_1 = 0x101
BTN_2 = 0x102
BTN_3 = 0x103
BTN_4 = 0x104
BTN_5 = 0x105
BTN_6 = 0x106
BTN_7 = 0x107
BTN_8 = 0x108
BTN_9 = 0x109
BTN_MOUSE = 0x110
BTN_LEFT = 0x110
BTN_RIGHT = 0x111
BTN_MIDDLE = 0x112
BTN_SIDE = 0x113
BTN_EXTRA = 0x114
BTN_FORWARD = 0x115
BTN_BACK = 0x116
BTN_TASK = 0x117
BTN_JOYSTICK = 0x120
BTN_TRIGGER = 0x120
BTN_THUMB = 0x121
BTN_THUMB2 = 0x122
BTN_TOP = 0x123
BTN_TOP2 = 0x124
BTN_PINKIE = 0x125
BTN_BASE = 0x126
BTN_BASE2 = 0x127
BTN_BASE3 = 0x128
BTN_BASE4 = 0x129
BTN_BASE5 = 0x12a
BTN_BASE6 = 0x12b
BTN_DEAD = 0x12f
BTN_GAMEPAD = 0x130
BTN_A = 0x130
BTN_B = 0x131
BTN_C = 0x132
BTN_X = 0x133
BTN_Y = 0x134
BTN_Z = 0x135
BTN_TL = 0x136
BTN_TR = 0x137
BTN_TL2 = 0x138
BTN_TR2 = 0x139
BTN_SELECT = 0x13a
BTN_START = 0x13b
BTN_MODE = 0x13c
BTN_THUMBL = 0x13d
BTN_THUMBR = 0x13e
BTN_DIGI = 0x140
BTN_TOOL_PEN = 0x140
BTN_TOOL_RUBBER = 0x141
BTN_TOOL_BRUSH = 0x142
BTN_TOOL_PENCIL = 0x143
BTN_TOOL_AIRBRUSH = 0x144
BTN_TOOL_FINGER = 0x145
BTN_TOOL_MOUSE = 0x146
BTN_TOOL_LENS = 0x147
BTN_TOUCH = 0x14a
BTN_STYLUS = 0x14b
BTN_STYLUS2 = 0x14c
BTN_TOOL_DOUBLETAP = 0x14d
BTN_TOOL_TRIPLETAP = 0x14e
BTN_WHEEL = 0x150
BTN_GEAR_DOWN = 0x150
BTN_GEAR_UP = 0x151
KEY_OK = 0x160
KEY_SELECT = 0x161
KEY_GOTO = 0x162
KEY_CLEAR = 0x163
KEY_POWER2 = 0x164
KEY_OPTION = 0x165
KEY_INFO = 0x166
KEY_TIME = 0x167
KEY_VENDOR = 0x168
KEY_ARCHIVE = 0x169
KEY_PROGRAM = 0x16a
KEY_CHANNEL = 0x16b
KEY_FAVORITES = 0x16c
KEY_EPG = 0x16d
KEY_PVR = 0x16e
KEY_MHP = 0x16f
KEY_LANGUAGE = 0x170
KEY_TITLE = 0x171
KEY_SUBTITLE = 0x172
KEY_ANGLE = 0x173
KEY_ZOOM = 0x174
KEY_MODE = 0x175
KEY_KEYBOARD = 0x176
KEY_SCREEN = 0x177
KEY_PC = 0x178
KEY_TV = 0x179
KEY_TV2 = 0x17a
KEY_VCR = 0x17b
KEY_VCR2 = 0x17c
KEY_SAT = 0x17d
KEY_SAT2 = 0x17e
KEY_CD = 0x17f
KEY_TAPE = 0x180
KEY_RADIO = 0x181
KEY_TUNER = 0x182
KEY_PLAYER = 0x183
KEY_TEXT = 0x184
KEY_DVD = 0x185
KEY_AUX = 0x186
KEY_MP3 = 0x187
KEY_AUDIO = 0x188
KEY_VIDEO = 0x189
KEY_DIRECTORY = 0x18a
KEY_LIST = 0x18b
KEY_MEMO = 0x18c
KEY_CALENDAR = 0x18d
KEY_RED = 0x18e
KEY_GREEN = 0x18f
KEY_YELLOW = 0x190
KEY_BLUE = 0x191
KEY_CHANNELUP = 0x192
KEY_CHANNELDOWN = 0x193
KEY_FIRST = 0x194
KEY_LAST = 0x195
KEY_AB = 0x196
KEY_NEXT = 0x197
KEY_RESTART = 0x198
KEY_SLOW = 0x199
KEY_SHUFFLE = 0x19a
KEY_BREAK = 0x19b
KEY_PREVIOUS = 0x19c
KEY_DIGITS = 0x19d
KEY_TEEN = 0x19e
KEY_TWEN = 0x19f
KEY_DEL_EOL = 0x1c0
KEY_DEL_EOS = 0x1c1
KEY_INS_LINE = 0x1c2
KEY_DEL_LINE = 0x1c3
KEY_FN = 0x1d0
KEY_FN_ESC = 0x1d1
KEY_FN_F1 = 0x1d2
KEY_FN_F2 = 0x1d3
KEY_FN_F3 = 0x1d4
KEY_FN_F4 = 0x1d5
KEY_FN_F5 = 0x1d6
KEY_FN_F6 = 0x1d7
KEY_FN_F7 = 0x1d8
KEY_FN_F8 = 0x1d9
KEY_FN_F9 = 0x1da
KEY_FN_F10 = 0x1db
KEY_FN_F11 = 0x1dc
KEY_FN_F12 = 0x1dd
KEY_FN_1 = 0x1de
KEY_FN_2 = 0x1df
KEY_FN_D = 0x1e0
KEY_FN_E = 0x1e1
KEY_FN_F = 0x1e2
KEY_FN_S = 0x1e3
KEY_FN_B = 0x1e4
KEY_MAX = 0x1ff
# Relative axes
REL_X = 0x00
REL_Y = 0x01
REL_Z = 0x02
REL_RX = 0x03
REL_RY = 0x04
REL_RZ = 0x05
REL_HWHEEL = 0x06
REL_DIAL = 0x07
REL_WHEEL = 0x08
REL_MISC = 0x09
REL_MAX = 0x0f
# Absolute axes
ABS_X = 0x00
ABS_Y = 0x01
ABS_Z = 0x02
ABS_RX = 0x03
ABS_RY = 0x04
ABS_RZ = 0x05
ABS_THROTTLE = 0x06
ABS_RUDDER = 0x07
ABS_WHEEL = 0x08
ABS_GAS = 0x09
ABS_BRAKE = 0x0a
ABS_HAT0X = 0x10
ABS_HAT0Y = 0x11
ABS_HAT1X = 0x12
ABS_HAT1Y = 0x13
ABS_HAT2X = 0x14
ABS_HAT2Y = 0x15
ABS_HAT3X = 0x16
ABS_HAT3Y = 0x17
ABS_PRESSURE = 0x18
ABS_DISTANCE = 0x19
ABS_TILT_X = 0x1a
ABS_TILT_Y = 0x1b
ABS_TOOL_WIDTH = 0x1c
ABS_VOLUME = 0x20
ABS_MISC = 0x28
ABS_MAX = 0x3f
# Misc events
MSC_SERIAL = 0x00
MSC_PULSELED = 0x01
MSC_GESTURE = 0x02
MSC_RAW = 0x03
MSC_SCAN = 0x04
MSC_MAX = 0x07
# LEDs
LED_NUML = 0x00
LED_CAPSL = 0x01
LED_SCROLLL = 0x02
LED_COMPOSE = 0x03
LED_KANA = 0x04
LED_SLEEP = 0x05
LED_SUSPEND = 0x06
LED_MUTE = 0x07
LED_MISC = 0x08
LED_MAIL = 0x09
LED_CHARGING = 0x0a
LED_MAX = 0x0f
# Autorepeat values
REP_DELAY = 0x00
REP_PERIOD = 0x01
REP_MAX = 0x01
# Sounds
SND_CLICK = 0x00
SND_BELL = 0x01
SND_TONE = 0x02
SND_MAX = 0x07
# IDs.
ID_BUS = 0
ID_VENDOR = 1
ID_PRODUCT = 2
ID_VERSION = 3
BUS_PCI = 0x01
BUS_ISAPNP = 0x02
BUS_USB = 0x03
BUS_HIL = 0x04
BUS_BLUETOOTH = 0x05
BUS_ISA = 0x10
BUS_I8042 = 0x11
BUS_XTKBD = 0x12
BUS_RS232 = 0x13
BUS_GAMEPORT = 0x14
BUS_PARPORT = 0x15
BUS_AMIGA = 0x16
BUS_ADB = 0x17
BUS_I2C = 0x18
BUS_HOST = 0x19
# Values describing the status of an effect
FF_STATUS_STOPPED = 0x00
FF_STATUS_PLAYING = 0x01
FF_STATUS_MAX = 0x01
|
bsd-3-clause
|
isyippee/nova
|
nova/api/openstack/compute/ips.py
|
7
|
2897
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from webob import exc
import nova
from nova.api.openstack import common
from nova.api.openstack.compute.views import addresses as views_addresses
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.i18n import _
ALIAS = 'ips'
authorize = extensions.os_compute_authorizer(ALIAS)
class IPsController(wsgi.Controller):
"""The servers addresses API controller for the OpenStack API."""
# Note(gmann): here we use the V2 view builder instead of V3 so that the
# V2.1 server ips response is the same as V2, which does not include
# "OS-EXT-IPS:type" & "OS-EXT-IPS-MAC:mac_addr". If needed, those can be
# added with a microversion by using the V3 view builder.
_view_builder_class = views_addresses.ViewBuilder
def __init__(self, **kwargs):
super(IPsController, self).__init__(**kwargs)
self._compute_api = nova.compute.API(skip_policy_check=True)
@extensions.expected_errors(404)
def index(self, req, server_id):
context = req.environ["nova.context"]
authorize(context, action='index')
instance = common.get_instance(self._compute_api, context, server_id)
networks = common.get_networks_for_instance(context, instance)
return self._view_builder.index(networks)
@extensions.expected_errors(404)
def show(self, req, server_id, id):
context = req.environ["nova.context"]
authorize(context, action='show')
instance = common.get_instance(self._compute_api, context, server_id)
networks = common.get_networks_for_instance(context, instance)
if id not in networks:
msg = _("Instance is not a member of specified network")
raise exc.HTTPNotFound(explanation=msg)
return self._view_builder.show(networks[id], id)
class IPs(extensions.V21APIExtensionBase):
"""Server addresses."""
name = "Ips"
alias = ALIAS
version = 1
def get_resources(self):
parent = {'member_name': 'server',
'collection_name': 'servers'}
resources = [
extensions.ResourceExtension(
ALIAS, IPsController(), parent=parent, member_name='ip')]
return resources
def get_controller_extensions(self):
return []
|
apache-2.0
|
tcheehow/MissionPlanner
|
Lib/distutils/command/bdist_wininst.py
|
50
|
15281
|
"""distutils.command.bdist_wininst
Implements the Distutils 'bdist_wininst' command: create a windows installer
exe-program."""
__revision__ = "$Id$"
import sys
import os
import string
from sysconfig import get_python_version
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
from distutils import log
from distutils.util import get_platform
class bdist_wininst (Command):
description = "create an executable installer for MS Windows"
user_options = [('bdist-dir=', None,
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('target-version=', None,
"require a specific python version" +
" on the target system"),
('no-target-compile', 'c',
"do not compile .py to .pyc on the target system"),
('no-target-optimize', 'o',
"do not compile .py to .pyo (optimized)"
"on the target system"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('bitmap=', 'b',
"bitmap to use for the installer instead of python-powered logo"),
('title=', 't',
"title to display on the installer background instead of default"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('install-script=', None,
"basename of installation script to be run after"
"installation or before deinstallation"),
('pre-install-script=', None,
"Fully qualified filename of a script to be run before "
"any files are installed. This script need not be in the "
"distribution"),
('user-access-control=', None,
"specify Vista's UAC handling - 'none'/default=no "
"handling, 'auto'=use UAC if target Python installed for "
"all users, 'force'=always use UAC"),
]
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
'skip-build']
def initialize_options (self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
self.no_target_compile = 0
self.no_target_optimize = 0
self.target_version = None
self.dist_dir = None
self.bitmap = None
self.title = None
self.skip_build = 0
self.install_script = None
self.pre_install_script = None
self.user_access_control = None
# initialize_options()
def finalize_options (self):
if self.bdist_dir is None:
if self.skip_build and self.plat_name:
# If build is skipped and plat_name is overridden, bdist will
# not see the correct 'plat_name' - so set that up manually.
bdist = self.distribution.get_command_obj('bdist')
bdist.plat_name = self.plat_name
# next the command will be initialized using that name
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'wininst')
if not self.target_version:
self.target_version = ""
if not self.skip_build and self.distribution.has_ext_modules():
short_version = get_python_version()
if self.target_version and self.target_version != short_version:
raise DistutilsOptionError, \
"target version can only be %s, or the '--skip-build'" \
" option must be specified" % (short_version,)
self.target_version = short_version
self.set_undefined_options('bdist',
('dist_dir', 'dist_dir'),
('plat_name', 'plat_name'),
)
if self.install_script:
for script in self.distribution.scripts:
if self.install_script == os.path.basename(script):
break
else:
raise DistutilsOptionError, \
"install_script '%s' not found in scripts" % \
self.install_script
# finalize_options()
def run (self):
if (sys.platform != "win32" and
(self.distribution.has_ext_modules() or
self.distribution.has_c_libraries())):
raise DistutilsPlatformError \
("distribution contains extensions and/or C libraries; "
"must be compiled on a Windows 32 platform")
if not self.skip_build:
self.run_command('build')
install = self.reinitialize_command('install', reinit_subcommands=1)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = 0
install.plat_name = self.plat_name
install_lib = self.reinitialize_command('install_lib')
# we do not want to include pyc or pyo files
install_lib.compile = 0
install_lib.optimize = 0
if self.distribution.has_ext_modules():
# If we are building an installer for a Python version other
# than the one we are currently running, then we need to ensure
# our build_lib reflects the other Python version rather than ours.
# Note that for target_version!=sys.version, we must have skipped the
# build step, so there is no issue with enforcing the build of this
# version.
target_version = self.target_version
if not target_version:
assert self.skip_build, "Should have already checked this"
target_version = sys.version[0:3]
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
build = self.get_finalized_command('build')
build.build_lib = os.path.join(build.build_base,
'lib' + plat_specifier)
# Use a custom scheme for the zip-file, because we have to decide
# at installation time which scheme to use.
for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
value = string.upper(key)
if key == 'headers':
value = value + '/Include/$dist_name'
setattr(install,
'install_' + key,
value)
log.info("installing to %s", self.bdist_dir)
install.ensure_finalized()
# avoid warning of 'install_lib' about installing
# into a directory not in sys.path
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
install.run()
del sys.path[0]
# And make an archive relative to the root of the
# pseudo-installation tree.
from tempfile import mktemp
archive_basename = mktemp()
fullname = self.distribution.get_fullname()
arcname = self.make_archive(archive_basename, "zip",
root_dir=self.bdist_dir)
# create an exe containing the zip-file
self.create_exe(arcname, fullname, self.bitmap)
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
self.distribution.dist_files.append(('bdist_wininst', pyversion,
self.get_installer_filename(fullname)))
# remove the zip-file again
log.debug("removing temporary file '%s'", arcname)
os.remove(arcname)
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
# run()
def get_inidata (self):
# Return data describing the installation.
lines = []
metadata = self.distribution.metadata
# Write the [metadata] section.
lines.append("[metadata]")
# 'info' will be displayed in the installer's dialog box,
# describing the items to be installed.
info = (metadata.long_description or '') + '\n'
# Escape newline characters
def escape(s):
return string.replace(s, "\n", "\\n")
for name in ["author", "author_email", "description", "maintainer",
"maintainer_email", "name", "url", "version"]:
data = getattr(metadata, name, "")
if data:
info = info + ("\n %s: %s" % \
(string.capitalize(name), escape(data)))
lines.append("%s=%s" % (name, escape(data)))
# The [setup] section contains entries controlling
# the installer runtime.
lines.append("\n[Setup]")
if self.install_script:
lines.append("install_script=%s" % self.install_script)
lines.append("info=%s" % escape(info))
lines.append("target_compile=%d" % (not self.no_target_compile))
lines.append("target_optimize=%d" % (not self.no_target_optimize))
if self.target_version:
lines.append("target_version=%s" % self.target_version)
if self.user_access_control:
lines.append("user_access_control=%s" % self.user_access_control)
title = self.title or self.distribution.get_fullname()
lines.append("title=%s" % escape(title))
import time
import distutils
build_info = "Built %s with distutils-%s" % \
(time.ctime(time.time()), distutils.__version__)
lines.append("build_info=%s" % build_info)
return string.join(lines, "\n")
# get_inidata()
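    # Editor's note (illustrative only, all values hypothetical): the string
    # returned by get_inidata() looks roughly like
    #
    #   [metadata]
    #   name=example-package
    #   version=1.0
    #   [Setup]
    #   info=An example package\n ...
    #   target_compile=1
    #   target_optimize=1
    #   title=example-package-1.0
    #   build_info=Built <date> with distutils-<version>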
def create_exe (self, arcname, fullname, bitmap=None):
import struct
self.mkpath(self.dist_dir)
cfgdata = self.get_inidata()
installer_name = self.get_installer_filename(fullname)
self.announce("creating %s" % installer_name)
if bitmap:
bitmapdata = open(bitmap, "rb").read()
bitmaplen = len(bitmapdata)
else:
bitmaplen = 0
file = open(installer_name, "wb")
file.write(self.get_exe_bytes())
if bitmap:
file.write(bitmapdata)
# Convert cfgdata from unicode to ascii, mbcs encoded
try:
unicode
except NameError:
pass
else:
if isinstance(cfgdata, unicode):
cfgdata = cfgdata.encode("mbcs")
# Append the pre-install script
cfgdata = cfgdata + "\0"
if self.pre_install_script:
script_data = open(self.pre_install_script, "r").read()
cfgdata = cfgdata + script_data + "\n\0"
else:
# empty pre-install script
cfgdata = cfgdata + "\0"
file.write(cfgdata)
# The 'magic number' 0x1234567B is used to make sure that the
# binary layout of 'cfgdata' is what the wininst.exe binary
# expects. If the layout changes, increment that number, make
# the corresponding changes to the wininst.exe sources, and
# recompile them.
header = struct.pack("<iii",
0x1234567B, # tag
len(cfgdata), # length
bitmaplen, # number of bytes in bitmap
)
file.write(header)
file.write(open(arcname, "rb").read())
# create_exe()
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
if self.target_version:
# if we create an installer for a specific python version,
# it's better to include this in the name
installer_name = os.path.join(self.dist_dir,
"%s.%s-py%s.exe" %
(fullname, self.plat_name, self.target_version))
else:
installer_name = os.path.join(self.dist_dir,
"%s.%s.exe" % (fullname, self.plat_name))
return installer_name
# get_installer_filename()
def get_exe_bytes (self):
from distutils.msvccompiler import get_build_version
# If a target-version other than the current version has been
# specified, then using the MSVC version from *this* build is no good.
# Without actually finding and executing the target version and parsing
# its sys.version, we just hard-code our knowledge of old versions.
# NOTE: Possible alternative is to allow "--target-version" to
# specify a Python executable rather than a simple version string.
# We can then execute this program to obtain any info we need, such
# as the real sys.version string for the build.
cur_version = get_python_version()
if self.target_version and self.target_version != cur_version:
# If the target version is *later* than us, then we assume they
# use what we use
# string compares seem wrong, but are what sysconfig.py itself uses
if self.target_version > cur_version:
bv = get_build_version()
else:
if self.target_version < "2.4":
bv = 6.0
else:
bv = 7.1
else:
# for current version - use authoritative check.
bv = get_build_version()
# wininst-x.y.exe is in the same directory as this file
directory = os.path.dirname(__file__)
# we must use a wininst-x.y.exe built with the same C compiler
# used for python. XXX What about mingw, borland, and so on?
# if plat_name starts with "win" but is not "win32"
# we want to strip "win" and leave the rest (e.g. -amd64)
# for all other cases, we don't want any suffix
if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
sfix = self.plat_name[3:]
else:
sfix = ''
filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, sfix))
f = open(filename, "rb")
try:
return f.read()
finally:
f.close()
# class bdist_wininst
|
gpl-3.0
|
VigTech/Vigtech-Services
|
env/lib/python2.7/site-packages/pip/_vendor/requests/api.py
|
435
|
5415
|
# -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How long to wait for the server to send data
before giving up, as a float, or a (`connect timeout, read timeout
<user/advanced.html#timeouts>`_) tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:return: :class:`Response <Response>` object
:rtype: requests.Response
Usage::
>>> import requests
>>> req = requests.request('GET', 'http://httpbin.org/get')
<Response [200]>
"""
session = sessions.Session()
response = session.request(method=method, url=url, **kwargs)
# By explicitly closing the session, we avoid leaving sockets open which
# can trigger a ResourceWarning in some cases, and look like a memory leak
# in others.
session.close()
return response
def get(url, params=None, **kwargs):
"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('get', url, params=params, **kwargs)
def options(url, **kwargs):
"""Sends a OPTIONS request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('options', url, **kwargs)
def head(url, **kwargs):
"""Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)
def post(url, data=None, json=None, **kwargs):
"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('post', url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs):
"""Sends a PUT request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
"""Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
"""Sends a DELETE request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('delete', url, **kwargs)
|
lgpl-3.0
|
baidu/Paddle
|
python/paddle/fluid/transpiler/distribute_transpiler.py
|
1
|
90029
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
"""
Steps to transpile trainer:
1. split each variable into multiple blocks, aligned by product(dim[1:]) (width).
2. rename the splited grad variables, adding the trainer_id suffix ".trainer_%d".
3. modify the trainer program to add a split_op for each grad variable.
4. append a send_op to send the splited variables to the servers.
5. add a recv_op to fetch params (splited blocks or the origin param) from the servers.
6. append a concat_op to merge the splited blocks and update the local weights.
Steps to transpile pserver:
1. create a new program for the parameter server.
2. create the params and grad variables that are assigned to the current server instance.
3. create a sub-block in the server side program.
4. append the ops that should run on the current server instance.
5. add the listen_and_serv op.
"""
import sys
import math
from functools import reduce
import collections
import six
import logging
import numpy as np
from .ps_dispatcher import RoundRobin, PSDispatcher
from .. import core, framework, unique_name
from ..framework import Program, default_main_program, \
default_startup_program, Block, Parameter, grad_var_name
from .details import wait_server_ready, UnionFind, VarStruct, VarsDistributed
from .details import delete_ops, find_op_by_output_arg
from ..distribute_lookup_table import find_distributed_lookup_table
LOOKUP_TABLE_TYPE = "lookup_table"
LOOKUP_TABLE_GRAD_TYPE = "lookup_table_grad"
OP_ROLE_VAR_ATTR_NAME = core.op_proto_and_checker_maker.kOpRoleVarAttrName()
RPC_OP_ROLE_ATTR_NAME = op_role_attr_name = core.op_proto_and_checker_maker.kOpRoleAttrName()
OPT_OP_ROLE_ATTR_VALUE = core.op_proto_and_checker_maker.OpRole.Optimize
RPC_OP_ROLE_ATTR_VALUE = core.op_proto_and_checker_maker.OpRole.RPC
DIST_OP_ROLE_ATTR_VALUE = core.op_proto_and_checker_maker.OpRole.Dist
LR_SCHED_OP_ROLE_ATTR_VALUE = core.op_proto_and_checker_maker.OpRole.LRSched
PRINT_LOG = False
def log(*args):
if PRINT_LOG:
print(args)
class VarBlock:
def __init__(self, varname, offset, size):
self.varname = varname
# NOTE: real offset is offset * size
self.offset = offset
self.size = size
def __str__(self):
return "%s:%d:%d" % (self.varname, self.offset, self.size)
def same_or_split_var(p_name, var_name):
return p_name == var_name or p_name.startswith(var_name + ".block")
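# Illustrative sketch (added for explanation, not in the original source):
# split blocks are named "<varname>.block<N>" further down in this file, so
# same_or_split_var matches both the original parameter name and any of its
# blocks; the names below are hypothetical.
def _example_same_or_split_var():
    assert same_or_split_var("fc_0.w_0", "fc_0.w_0")          # unsplit param
    assert same_or_split_var("fc_0.w_0.block0", "fc_0.w_0")   # one of its blocks
    assert not same_or_split_var("fc_0.b_0", "fc_0.w_0")      # a different param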
def slice_variable(var_list, slice_count, min_block_size):
"""
We may need to split a dense tensor into one or more blocks and put
them equally onto the parameter servers. One block is a sub-tensor
aligned by dim[0] of the tensor.
We need a minimal block size so that the calculations on the
parameter server side can gain better performance. By default the
minimum block size is 8K elements (each element may be 16, 32 or 64 bit).
Args:
var_list (list): List of variables.
slice_count (int): Number of slices that the variables will be split
into, which is usually the count of pserver services.
min_block_size (int): Minimum size of a splitted block.
Returns:
blocks (list[str]): A list of VarBlock strings in the format
"varname:block_id:block_size"; each one specifies a shard of the var.
"""
blocks = []
for var in var_list:
split_count = slice_count
var_numel = reduce(lambda x, y: x * y, var.shape)
max_pserver_count = int(math.floor(var_numel / float(min_block_size)))
if max_pserver_count == 0:
max_pserver_count = 1
if max_pserver_count < slice_count:
split_count = max_pserver_count
block_size = int(math.ceil(var_numel / float(split_count)))
if len(var.shape) >= 2:
# align by dim1(width)
dim1 = reduce(lambda x, y: x * y, var.shape[1:])
remains = block_size % dim1
if remains != 0:
block_size += dim1 - remains
# update split_count after aligning
split_count = int(math.ceil(var_numel / float(block_size)))
for block_id in range(split_count):
curr_block_size = min(block_size, var_numel - (
(block_id) * block_size))
block = VarBlock(var.name, block_id, curr_block_size)
blocks.append(str(block))
return blocks
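# Illustrative sketch (added for explanation, not in the original source):
# with the default min_block_size of 8192, a 10000-element tensor can hold at
# most one >=8192-element block, so slice_variable keeps it in a single
# VarBlock even when three pservers are available. The variable name and shape
# below are hypothetical.
def _example_slice_variable():
    class _FakeVar(object):
        name = "fc_0.w_0"
        shape = (1000, 10)

    blocks = slice_variable([_FakeVar()], slice_count=3, min_block_size=8192)
    assert blocks == ["fc_0.w_0:0:10000"]
    return blocks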
class DistributeTranspilerConfig(object):
"""
.. py:attribute:: slice_var_up (bool)
Whether to do Tensor slicing for pservers, default is True.
.. py:attribute:: split_method (PSDispatcher)
RoundRobin or HashName can be used.
Try to choose the best method to balance loads for pservers.
.. py:attribute:: min_block_size (int)
Minimum number of elements in a splitted block.
According to https://github.com/PaddlePaddle/Paddle/issues/8638#issuecomment-369912156,
bandwidth is used efficiently when the data size is larger than 2MB. If you
want to change it, please make sure you have read the slice_variable function.
"""
slice_var_up = True
split_method = None
min_block_size = 8192
enable_dc_asgd = False
# supported modes: pserver, nccl2
mode = "pserver"
print_log = False
wait_port = True
# split the send recv var in runtime
runtime_split_send_recv = False
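# Illustrative sketch (added for explanation, not in the original source):
# a typical pserver-mode setup only overrides a few of the attributes above;
# the values shown are hypothetical.
def _example_transpiler_config():
    config = DistributeTranspilerConfig()
    config.slice_var_up = True         # shard large tensors across pservers
    config.min_block_size = 8192       # keep each shard large enough to use bandwidth well
    config.split_method = RoundRobin   # dispatch shards to pservers in turn
    return config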
class DistributeTranspiler(object):
"""
**DistributeTranspiler**
Convert the fluid program to distributed data-parallelism programs.
Supports two modes: pserver mode and nccl2 mode.
In pserver mode, the main_program will be transformed to use a remote
parameter server to do parameter optimization. And the optimization
graph will be put into a parameter server program.
In nccl2 mode, the transpiler will append a NCCL_ID broadcasting
op in startup_program to share the NCCL_ID across the job nodes.
After transpile_nccl2 is called, you ***must*** pass the trainer_id and
num_trainers arguments to ParallelExecutor to enable NCCL2 distributed
mode.
Examples:
.. code-block:: python
# for pserver mode
pserver_endpoints = "192.168.0.1:6174,192.168.0.2:6174"
trainer_endpoints = "192.168.0.1:6174,192.168.0.2:6174"
current_endpoint = "192.168.0.1:6174"
trainer_id = 0
trainers = 4
role = os.getenv("PADDLE_TRAINING_ROLE")
t = fluid.DistributeTranspiler()
t.transpile(
trainer_id, pservers=pserver_endpoints, trainers=trainers)
if role == "PSERVER":
pserver_program = t.get_pserver_program(current_endpoint)
pserver_startup_program = t.get_startup_program(current_endpoint,
pserver_program)
elif role == "TRAINER":
trainer_program = t.get_trainer_program()
# for nccl2 mode
config = fluid.DistributeTranspilerConfig()
config.mode = "nccl2"
t = fluid.DistributeTranspiler(config=config)
t.transpile(trainer_id, trainers=workers, current_endpoint=curr_ep)
exe = fluid.ParallelExecutor(
use_cuda,
loss_name=loss_var.name,
num_trainers=len(trainers.split(",")),
trainer_id=trainer_id
)
"""
def __init__(self, config=None):
if config is not None:
self.config = config
else:
self.config = DistributeTranspilerConfig()
if self.config.split_method is None:
self.config.split_method = RoundRobin
global PRINT_LOG
if self.config.print_log:
PRINT_LOG = True
assert (self.config.min_block_size >= 8192)
assert (self.config.split_method.__bases__[0] == PSDispatcher)
def _transpile_nccl2(self,
trainer_id,
trainers,
current_endpoint,
startup_program=None,
wait_port=True):
if not startup_program:
startup_program = default_startup_program()
if trainer_id >= 0:
worker_endpoints = trainers.split(",")
# send NCCL_ID to others or recv from trainer 0
worker_endpoints.remove(current_endpoint)
if trainer_id == 0 and wait_port:
wait_server_ready(worker_endpoints)
nccl_id_var = startup_program.global_block().create_var(
name="NCCLID", persistable=True, type=core.VarDesc.VarType.RAW)
startup_program.global_block().append_op(
type="gen_nccl_id",
inputs={},
outputs={"NCCLID": nccl_id_var},
attrs={
"endpoint": current_endpoint,
"endpoint_list": worker_endpoints,
"trainer_id": trainer_id
})
return nccl_id_var
else:
raise ValueError("must set trainer_id > 0")
def _get_all_remote_sparse_update_op(self, main_program):
sparse_update_ops = []
sparse_update_op_types = ["lookup_table", "nce", "hierarchical_sigmoid"]
for op in main_program.global_block().ops:
if op.type in sparse_update_op_types and op.attr(
'remote_prefetch') is True:
sparse_update_ops.append(op)
return sparse_update_ops
def _update_remote_sparse_update_op(self, param_varname, height_sections,
endpoint_map, table_names):
for op in self.sparse_update_ops:
if param_varname in op.input_arg_names:
op._set_attr('epmap', endpoint_map)
op._set_attr('table_names', table_names)
op._set_attr('height_sections', height_sections)
op._set_attr('trainer_id', self.trainer_id)
def _is_input_of_remote_sparse_update_op(self, param_name):
for op in self.sparse_update_ops:
if param_name in op.input_arg_names:
return True
return False
def transpile(self,
trainer_id,
program=None,
pservers="127.0.0.1:6174",
trainers=1,
sync_mode=True,
startup_program=None,
current_endpoint="127.0.0.1:6174"):
"""
Run the transpiler.
Args:
trainer_id (int): id of the current trainer worker; if you have
n workers, the id ranges from 0 to n-1
program (Program|None): program to transpile,
default is fluid.default_main_program().
startup_program (Program|None): startup_program to transpile,
default is fluid.default_startup_program().
pservers (str): comma separated ip:port string for the pserver
list.
trainers (int|str): in pserver mode this is the number of
trainers, in nccl2 mode this is a string of trainer
endpoints.
sync_mode (bool): Do sync training or not, default is True.
current_endpoint (str): need pass current endpoint when
transpile as nccl2 distributed mode. In pserver mode
this argument is not used.
"""
if program is None:
program = default_main_program()
if startup_program is None:
startup_program = default_startup_program()
self.origin_program = program
self.startup_program = startup_program
self.origin_startup_program = self.startup_program.clone()
if self.config.mode == "nccl2":
assert (isinstance(trainers, str))
self.origin_program._trainers_endpoints = trainers.split(",")
self._transpile_nccl2(
trainer_id,
trainers,
current_endpoint,
startup_program=startup_program,
wait_port=self.config.wait_port)
return
self.trainer_num = trainers
self.sync_mode = sync_mode
self.trainer_id = trainer_id
pserver_endpoints = pservers.split(",")
self.pserver_endpoints = pserver_endpoints
self.vars_overview = VarsDistributed()
self.optimize_ops, self.params_grads = self._get_optimize_pass()
ps_dispatcher = self.config.split_method(self.pserver_endpoints)
self.table_name = find_distributed_lookup_table(self.origin_program)
self.has_distributed_lookup_table = self.table_name is not None
self.param_name_to_grad_name = dict()
self.grad_name_to_param_name = dict()
for param_var, grad_var in self.params_grads:
self.param_name_to_grad_name[param_var.name] = grad_var.name
self.grad_name_to_param_name[grad_var.name] = param_var.name
# get all sparse update ops
self.sparse_update_ops = self._get_all_remote_sparse_update_op(
self.origin_program)
# use_sparse_update_param_name -> split_height_section
self.sparse_param_to_height_sections = dict()
# add distributed attrs to program
self.origin_program._is_distributed = True
self.origin_program._endpoints = self.pserver_endpoints
self.origin_program._ps_endpoint = current_endpoint
self.origin_program._is_chief = self.trainer_id == 0
self.origin_program._distributed_lookup_table = self.table_name if self.table_name else None
# step 1: split and create vars, then put splited vars in dicts for later use.
self._init_splited_vars()
# step 2: insert send op to send gradient vars to parameter servers
ps_dispatcher.reset()
send_vars = []
# in general cases, the number of pservers is a multiple of 2, and this
# will lead to an uneven distribution between weights and biases:
# fc_w@GRAD_trainer_0, fc_w@GRAD_trainer_1 --> pserver1
# fc_b@GRAD_trainer_0, fc_b@GRAD_trainer_1 --> pserver2
# shuffle the map will avoid the uneven distribution above
grad_var_mapping_items = list(six.iteritems(self.grad_var_mapping))
if not self.config.slice_var_up:
np.random.seed(self.origin_program.random_seed)
np.random.shuffle(grad_var_mapping_items)
self.grad_name_to_send_dummy_out = dict()
for grad_varname, splited_vars in grad_var_mapping_items:
eplist = ps_dispatcher.dispatch(splited_vars)
if not self.config.slice_var_up:
assert (len(splited_vars) == 1)
splited_grad_varname = grad_varname
if len(splited_vars) == 1:
splited_grad_varname = splited_vars[0].name
index = find_op_by_output_arg(
program.global_block(), splited_grad_varname, reverse=True)
if splited_vars[0].type == core.VarDesc.VarType.SELECTED_ROWS:
sparse_param_name = self.grad_name_to_param_name[
grad_varname]
if self._is_input_of_remote_sparse_update_op(
sparse_param_name):
self.sparse_param_to_height_sections[
sparse_param_name] = [splited_vars[0].shape[0]]
elif len(splited_vars) > 1:
orig_var = program.global_block().vars[splited_grad_varname]
index = find_op_by_output_arg(
program.global_block(), splited_grad_varname, reverse=True)
if not self.config.runtime_split_send_recv:
self._insert_split_op(program, orig_var, index,
splited_vars)
index += 1
else:
AssertionError("Can not insert the send op by original "
"variable name :", splited_grad_varname)
dummy_output = program.global_block().create_var(
name=framework.generate_control_dev_var_name())
self.grad_name_to_send_dummy_out[grad_varname] = dummy_output
if self.config.runtime_split_send_recv:
send_input_vars = [
program.global_block().vars[splited_grad_varname]
]
sections = self._get_splited_var_sections(splited_vars)
send_varnames = [var.name for var in splited_vars]
else:
send_input_vars = splited_vars
sections = []
send_varnames = []
# get the send op_role_var: if not splited, the grad should have the .trainer suffix;
# if splited, the grad should be the original grad var name (split_by_ref and send
# will be on the same place). ParallelExecutor
# will use op_role_var to get the expected device place to run this op.
program.global_block()._insert_op(
index=index + 1,
type="send",
inputs={"X": send_input_vars},
outputs={"Out": dummy_output},
attrs={
"epmap": eplist,
"sections": sections,
"send_varnames": send_varnames,
RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE,
OP_ROLE_VAR_ATTR_NAME: [
self.grad_name_to_param_name[grad_varname],
splited_grad_varname
],
"sync_mode": not self.sync_mode,
})
for _, var in enumerate(splited_vars):
send_vars.append(var)
if self.sync_mode:
send_barrier_out = program.global_block().create_var(
name=framework.generate_control_dev_var_name())
if self.has_distributed_lookup_table:
self.grad_name_to_send_dummy_out[
self.table_name] = program.global_block().create_var(
name=framework.generate_control_dev_var_name())
input_deps = list(self.grad_name_to_send_dummy_out.values())
program.global_block().append_op(
type="send_barrier",
inputs={"X": list(input_deps)},
outputs={"Out": send_barrier_out},
attrs={
"endpoints": pserver_endpoints,
"sync_mode": self.sync_mode,
"trainer_id": self.trainer_id,
RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
})
# step 3: insert recv op to receive parameters from parameter server
recv_vars = []
for _, var in enumerate(send_vars):
recv_vars.append(self.grad_param_mapping[var])
ps_dispatcher.reset()
eplist = ps_dispatcher.dispatch(recv_vars)
for i, ep in enumerate(eplist):
self.param_grad_ep_mapping[ep]["params"].append(recv_vars[i])
self.param_grad_ep_mapping[ep]["grads"].append(send_vars[i])
distributed_var = self.vars_overview.get_distributed_var_by_slice(
recv_vars[i].name)
distributed_var.endpoint = ep
# step4: Concat the parameters splits together after recv.
all_recv_outputs = []
for param_varname, splited_var in six.iteritems(self.param_var_mapping):
eps = []
table_names = []
for var in splited_var:
index = [v.name for v in recv_vars].index(var.name)
eps.append(eplist[index])
table_names.append(var.name)
if self.sync_mode:
recv_dep_in = send_barrier_out
else:
# connect deps to send op in async mode
recv_dep_in = self.grad_name_to_send_dummy_out[
self.param_name_to_grad_name[param_varname]]
# get the recv op_role_var: if not splited, the grad should have the .trainer suffix;
# if splited, the grad should be the original grad var name. ParallelExecutor
# will use op_role_var to get the expected device place to run this op.
orig_grad_name = self.param_name_to_grad_name[param_varname]
recv_op_role_var_name = orig_grad_name
splited_trainer_grad = self.grad_var_mapping[orig_grad_name]
if len(splited_trainer_grad) == 1:
recv_op_role_var_name = splited_trainer_grad[0].name
if param_varname in self.sparse_param_to_height_sections:
for table_name in table_names:
distributed_var = self.vars_overview.get_distributed_var_by_slice(
table_name)
distributed_var.vtype = "RemotePrefetch"
height_sections = self.sparse_param_to_height_sections[
param_varname]
self._update_remote_sparse_update_op(
param_varname, height_sections, eps, table_names)
else:
recv_varnames = []
if self.config.runtime_split_send_recv:
orig_param = program.global_block().vars[param_varname]
recv_varnames = [var.name for var in splited_var]
splited_var = [orig_param]
all_recv_outputs.extend(splited_var)
program.global_block().append_op(
type="recv",
inputs={"X": [recv_dep_in]},
outputs={"Out": splited_var},
attrs={
"epmap": eps,
"recv_varnames": recv_varnames,
"trainer_id": self.trainer_id,
RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE,
OP_ROLE_VAR_ATTR_NAME:
[param_varname, recv_op_role_var_name],
"sync_mode": not self.sync_mode
})
if self.sync_mode:
# form a WAW dependency
program.global_block().append_op(
type="fetch_barrier",
inputs={},
outputs={"Out": all_recv_outputs},
attrs={
"endpoints": pserver_endpoints,
"trainer_id": self.trainer_id,
RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
})
for param_varname, splited_var in six.iteritems(self.param_var_mapping):
if len(splited_var) <= 1:
continue
orig_param = program.global_block().vars[param_varname]
if param_varname not in self.sparse_param_to_height_sections:
if not self.config.runtime_split_send_recv:
program.global_block().append_op(
type="concat",
inputs={"X": splited_var},
outputs={"Out": [orig_param]},
attrs={
"axis": 0,
RPC_OP_ROLE_ATTR_NAME: DIST_OP_ROLE_ATTR_VALUE
})
self._get_trainer_startup_program(recv_vars=recv_vars, eplist=eplist)
if self.has_distributed_lookup_table:
self._replace_lookup_table_op_with_prefetch(program,
pserver_endpoints)
self._split_table_grad_and_add_send_vars(program, pserver_endpoints)
self._get_distributed_optimizer_vars()
self.origin_program._parameters_on_pservers = self.vars_overview
def get_trainer_program(self, wait_port=True):
"""
Get transpiled trainer side program.
Returns:
Program: trainer side program.
"""
# remove optimize ops and add a send op to main_program
# FIXME(typhoonzero): Also ops like clip_gradient, lrn_decay?
lr_ops = self._get_lr_ops()
delete_ops(self.origin_program.global_block(), self.optimize_ops)
delete_ops(self.origin_program.global_block(), lr_ops)
# delete table init op
if self.has_distributed_lookup_table:
table_var = self.startup_program.global_block().vars[
self.table_name]
table_param_init_op = []
for op in self.startup_program.global_block().ops:
if self.table_name in op.output_arg_names:
table_param_init_op.append(op)
init_op_num = len(table_param_init_op)
if init_op_num != 1:
raise ValueError("table init op num should be 1, now is " + str(
init_op_num))
table_init_op = table_param_init_op[0]
self.startup_program.global_block().append_op(
type="fake_init",
inputs={},
outputs={"Out": table_var},
attrs={"shape": table_init_op.attr('shape')})
delete_ops(self.startup_program.global_block(), table_param_init_op)
self.origin_program.__str__()
if wait_port:
wait_server_ready(self.pserver_endpoints)
return self.origin_program
def _get_trainer_startup_program(self, recv_vars, eplist):
"""
Get transpiled trainer side startup program.
Args:
recv_vars (list): Variable list to recv for current trainer_id
eplist (list): A list of pserver endpoint strings, one for each variable in recv_vars.
Returns:
Program: trainer side startup program.
"""
startup_program = self.startup_program
# FIXME(gongwb): delete not need ops.
# note that: some parameter is not trainable and those ops can't be deleted.
for varname, splited_var in six.iteritems(self.param_var_mapping):
# Get the eplist of recv vars
eps = []
for var in splited_var:
index = [v.name for v in recv_vars].index(var.name)
eps.append(eplist[index])
for var in splited_var:
if startup_program.global_block().has_var(var.name):
continue
startup_program.global_block().create_var(
name=var.name,
persistable=False,
type=var.type,
dtype=var.dtype,
shape=var.shape,
lod_level=var.lod_level)
op = startup_program.global_block().append_op(
type="recv",
inputs={"X": []},
outputs={"Out": splited_var},
attrs={
"epmap": eps,
"trainer_id": self.trainer_id,
RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
})
fetch_barrier_out = startup_program.global_block().create_var(
name=framework.generate_control_dev_var_name())
startup_program.global_block().append_op(
type="fetch_barrier",
inputs={},
outputs={"Out": fetch_barrier_out},
attrs={
"endpoints": self.pserver_endpoints,
"trainer_id": self.trainer_id,
RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
})
for varname, splited_var in six.iteritems(self.param_var_mapping):
# add concat ops to merge splited parameters received from parameter servers.
if len(splited_var) <= 1:
continue
# NOTE: if enable memory optimization, origin vars maybe removed.
if varname in startup_program.global_block().vars:
orig_param = startup_program.global_block().vars[varname]
else:
origin_param_var = self.origin_program.global_block().vars[
varname]
orig_param = startup_program.global_block().create_var(
name=varname,
persistable=origin_param_var.persistable,
type=origin_param_var.type,
dtype=origin_param_var.dtype,
shape=origin_param_var.shape)
startup_program.global_block().append_op(
type="concat",
inputs={"X": splited_var},
outputs={"Out": [orig_param]},
attrs={"axis": 0})
return startup_program
def get_pserver_program(self, endpoint):
"""
Get parameter server side program.
Args:
endpoint (str): current parameter server endpoint.
Returns:
Program: the program for current parameter server to run.
"""
# TODO(panyx0718): Revisit this assumption. what if #blocks > #pservers.
# NOTE: assume blocks of the same variable is not distributed
# on the same pserver, only change param/grad varnames for
# trainers to fetch.
sys.stderr.write(
"get_pserver_program() is deprecated, call get_pserver_programs() to get pserver main and startup in a single call.\n"
)
# step1
pserver_program = Program()
pserver_program.random_seed = self.origin_program.random_seed
pserver_program._copy_dist_param_info_from(self.origin_program)
# step2: Create vars to receive vars at parameter servers.
recv_inputs = []
for v in self.param_grad_ep_mapping[endpoint]["params"]:
self._clone_var(pserver_program.global_block(), v)
for v in self.param_grad_ep_mapping[endpoint]["grads"]:
# create vars for each trainer in global scope, so
# we don't need to create them when grad arrives.
# change client side var name to origin name by
# removing ".trainer_%d" suffix
suff_idx = v.name.find(".trainer_")
if suff_idx >= 0:
orig_var_name = v.name[:suff_idx]
else:
orig_var_name = v.name
# NOTE: single_trainer_var must be created for multi-trainer
# case to merge grads from multiple trainers
single_trainer_var = \
pserver_program.global_block().create_var(
name=orig_var_name,
persistable=True,
type=v.type,
dtype=v.dtype,
shape=v.shape)
if self.sync_mode and self.trainer_num > 1:
for trainer_id in range(self.trainer_num):
var = pserver_program.global_block().create_var(
name="%s.trainer_%d" % (orig_var_name, trainer_id),
persistable=False,
type=v.type,
dtype=v.dtype,
shape=v.shape)
recv_inputs.append(var)
else:
recv_inputs.append(single_trainer_var)
# step 3
# Create a union-find data structure from optimize ops,
# If two ops are connected, we could add these two ops
# into one set.
ufind = self._create_ufind(self.optimize_ops)
# step 3.2
# Iterate through the ops and append the optimize ops which
# are located on the current pserver
opt_op_on_pserver = []
for _, op in enumerate(self.optimize_ops):
if self._is_optimizer_op(op) and self._is_opt_op_on_pserver(
endpoint, op):
opt_op_on_pserver.append(op)
# step 3.3
# prepare if dc asgd is enabled
if self.config.enable_dc_asgd:
assert not self.sync_mode
self.param_bak_list = []
# add param_bak for each trainer
for p in self.param_grad_ep_mapping[endpoint]["params"]:
# each parameter should have w_bak for each trainer id
for i in range(self.trainer_num):
param_bak_name = "%s.trainer_%d_bak" % (p.name, i)
tmpvar = pserver_program.global_block().create_var(
# NOTE: this var name format is used in `request_get_handler`
name=param_bak_name,
type=p.type,
shape=p.shape,
dtype=p.dtype)
self.param_bak_list.append((p, tmpvar))
# step 3.4
# Iterate through the ops; if an op and the optimize ops
# which are located on the current pserver are in one set, then
# append it into the sub program.
global_ops = []
# sparse grad name to param name
sparse_grad_to_param = []
def __append_optimize_op__(op, block, grad_to_block_id, merged_var,
lr_ops):
if self._is_optimizer_op(op):
self._append_pserver_ops(block, op, endpoint, grad_to_block_id,
self.origin_program, merged_var,
sparse_grad_to_param)
elif op not in lr_ops:
self._append_pserver_non_opt_ops(block, op)
def __clone_lr_op_sub_block__(op, program, lr_block):
if not op.has_attr('sub_block'):
return
origin_block_desc = op.attr('sub_block')
origin_block = self.origin_program.block(origin_block_desc.id)
assert isinstance(origin_block, Block)
# we put the new sub block to new block to follow the block
# hierarchy of the original blocks
new_sub_block = program._create_block(lr_block.idx)
# clone vars
for var in origin_block.vars:
new_sub_block._clone_variable(var)
# clone ops
for origin_op in origin_block.ops:
cloned_op = self._clone_lr_op(program, new_sub_block, origin_op)
# clone sub_block of op
__clone_lr_op_sub_block__(cloned_op, program, new_sub_block)
# reset the block of op
op._set_attr('sub_block', new_sub_block)
# append lr decay ops to the child block if exists
lr_ops = self._get_lr_ops()
# record optimize blocks and we can run them on pserver parallel
optimize_blocks = []
if len(lr_ops) > 0:
lr_decay_block = pserver_program._create_block(
pserver_program.num_blocks - 1)
optimize_blocks.append(lr_decay_block)
for _, op in enumerate(lr_ops):
cloned_op = self._append_pserver_non_opt_ops(lr_decay_block, op)
# append sub blocks to pserver_program in lr_decay_op
__clone_lr_op_sub_block__(cloned_op, pserver_program,
lr_decay_block)
# append op to the current block
grad_to_block_id = []
pre_block_idx = pserver_program.num_blocks - 1
for idx, opt_op in enumerate(opt_op_on_pserver):
per_opt_block = pserver_program._create_block(pre_block_idx)
optimize_blocks.append(per_opt_block)
optimize_target_param_name = opt_op.attr(OP_ROLE_VAR_ATTR_NAME)[0]
# append grad merging ops before clip and weight decay
# e.g. merge grad -> L2Decay op -> clip op -> optimize
merged_var = None
for _, op in enumerate(self.optimize_ops):
# find the origin grad var before clipping/L2Decay,
# merged_var should be the input var name of L2Decay
grad_varname_for_block = op.attr(OP_ROLE_VAR_ATTR_NAME)[1]
if op.attr(OP_ROLE_VAR_ATTR_NAME)[
0] == optimize_target_param_name:
merged_var = self._append_pserver_grad_merge_ops(
per_opt_block, grad_varname_for_block, endpoint,
grad_to_block_id, self.origin_program)
if merged_var:
break # append optimize op once then append other ops.
if merged_var:
for _, op in enumerate(self.optimize_ops):
# optimizer is connected to itself
if op.attr(OP_ROLE_VAR_ATTR_NAME)[0] == optimize_target_param_name and \
op not in global_ops:
log("append opt op: ", op.type, op.input_arg_names,
merged_var)
__append_optimize_op__(op, per_opt_block,
grad_to_block_id, merged_var,
lr_ops)
# dedup grad to ids list
grad_to_block_id = list(set(grad_to_block_id))
# append global ops
if global_ops:
opt_state_block = pserver_program._create_block(
pserver_program.num_blocks - 1)
optimize_blocks.append(opt_state_block)
for glb_op in global_ops:
__append_optimize_op__(glb_op, opt_state_block,
grad_to_block_id, None, lr_ops)
# process distributed lookup_table
prefetch_var_name_to_block_id = []
if self.has_distributed_lookup_table:
pserver_index = self.pserver_endpoints.index(endpoint)
table_opt_block = self._create_table_optimize_block(
pserver_index, pserver_program, pre_block_idx, grad_to_block_id)
optimize_blocks.append(table_opt_block)
lookup_table_var_name_to_block_id = self._create_prefetch_block(
pserver_index, pserver_program, table_opt_block)
checkpoint_block_id = self._create_checkpoint_save_block(
pserver_program, table_opt_block.idx)
pserver_program._distributed_lookup_table = self.table_name
prefetch_var_name_to_block_id.extend(
lookup_table_var_name_to_block_id)
if len(optimize_blocks) == 0:
logging.warn("pserver [" + str(endpoint) +
"] has no optimize block!!")
pre_block_idx = pserver_program.num_blocks - 1
empty_block = pserver_program._create_block(pre_block_idx)
optimize_blocks.append(empty_block)
# In some cases, a parameter server will have no parameter to optimize,
# so we give an empty optimize block to that parameter server.
attrs = {
"optimize_blocks": optimize_blocks,
"endpoint": endpoint,
"Fanin": self.trainer_num,
"sync_mode": self.sync_mode,
"grad_to_block_id": grad_to_block_id,
"sparse_grad_to_param": sparse_grad_to_param,
}
if self.has_distributed_lookup_table:
attrs['checkpint_block_id'] = checkpoint_block_id
if self.config.enable_dc_asgd:
attrs['dc_asgd'] = True
if len(prefetch_var_name_to_block_id) > 0:
attrs[
'prefetch_var_name_to_block_id'] = prefetch_var_name_to_block_id
# step 5: append the listen_and_serv op
pserver_program.global_block().append_op(
type="listen_and_serv",
inputs={'X': recv_inputs},
outputs={},
attrs=attrs)
pserver_program._sync_with_cpp()
# save pserver program to generate pserver side startup relatively.
self.pserver_program = pserver_program
return pserver_program
def get_pserver_programs(self, endpoint):
"""
Get pserver side main program and startup program for distributed training.
Args:
endpoint (str): current pserver endpoint.
Returns:
tuple: (main_program, startup_program), of type "Program"
"""
pserver_prog = self.get_pserver_program(endpoint)
pserver_startup = self.get_startup_program(
endpoint, pserver_program=pserver_prog)
return pserver_prog, pserver_startup
def get_startup_program(self,
endpoint,
pserver_program=None,
startup_program=None):
"""
**Deprecated**
Get startup program for current parameter server.
Modify operator input variables if there are variables that
were split to several blocks.
Args:
endpoint (str): current pserver endpoint.
pserver_program (Program): deprecated, call get_pserver_program first.
startup_program (Program): deprecated, should pass startup_program
when initializing
Returns:
Program: parameter server side startup program.
"""
s_prog = Program()
orig_s_prog = self.startup_program
s_prog.random_seed = orig_s_prog.random_seed
params = self.param_grad_ep_mapping[endpoint]["params"]
def _get_splited_name_and_shape(varname):
for idx, splited_param in enumerate(params):
pname = splited_param.name
if same_or_split_var(pname, varname) and varname != pname:
return pname, splited_param.shape
return "", []
# 1. clone vars from the pserver program into the startup program
pserver_vars = pserver_program.global_block().vars
created_var_map = collections.OrderedDict()
for _, var in six.iteritems(pserver_vars):
tmpvar = s_prog.global_block()._clone_variable(var)
created_var_map[var.name] = tmpvar
# 2. rename op outputs
for op in orig_s_prog.global_block().ops:
new_outputs = collections.OrderedDict()
# do not append startup op if var is not on this pserver
op_on_pserver = False
# TODO(gongwb): remove this line.
if op.type not in ["recv", "fetch_barrier", "concat"]:
for key in op.output_names:
newname, _ = _get_splited_name_and_shape(op.output(key)[0])
if newname:
op_on_pserver = True
new_outputs[key] = created_var_map[newname]
elif op.output(key)[0] in pserver_vars:
op_on_pserver = True
new_outputs[key] = pserver_vars[op.output(key)[0]]
if op_on_pserver:
# most startup program ops have no inputs
new_inputs = self._get_input_map_from_op(pserver_vars, op)
if op.type in [
"gaussian_random", "fill_constant", "uniform_random"
]:
op._set_attr("shape", list(new_outputs["Out"].shape))
s_prog.global_block().append_op(
type=op.type,
inputs=new_inputs,
outputs=new_outputs,
attrs=op.all_attrs())
if self.config.enable_dc_asgd:
for p, p_bak in self.param_bak_list:
startup_param_var = s_prog.global_block().vars[p.name]
startup_tmpvar = s_prog.global_block().vars[p_bak.name]
# copy init random value to param_bak
s_prog.global_block().append_op(
type="assign",
inputs={"X": startup_param_var},
outputs={"Out": startup_tmpvar})
return s_prog
# ====================== private transpiler functions =====================
def _get_slice_var_info(self, slice_var):
block_suffix = "block"
block_idx = 0
offset = 0
is_slice = False
orig_var_name, block_name, _ = self._get_varname_parts(slice_var.name)
if not block_name:
return is_slice, block_idx, offset
block_idx = int(block_name.split(block_suffix)[1])
skip_dim0 = 0
slice_vars = self.param_var_mapping[orig_var_name]
orig_dim1_flatten = 1
if len(slice_vars[0].shape) >= 2:
orig_dim1_flatten = reduce(lambda x, y: x * y,
slice_vars[0].shape[1:])
for slice_var in slice_vars[:block_idx]:
skip_dim0 += slice_var.shape[0]
offset = skip_dim0 * orig_dim1_flatten
is_slice = True
return is_slice, block_idx, offset
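# Illustrative note (added for explanation; names and sizes are hypothetical):
# for a slice named "fc_0.w_0.block2" whose earlier blocks have dim0 sizes
# [400, 400] and whose original width (product of dim[1:]) is 10, the method
# above returns (True, 2, 8000), i.e. (is_slice, block_idx, flat element offset).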
def _get_distributed_optimizer_vars(self):
def _get_distributed_optimizer_var(endpoint):
opt_op_on_pserver = []
for _, op in enumerate(self.optimize_ops):
if self._is_optimizer_op(op) and self._is_opt_op_on_pserver(
endpoint, op):
opt_op_on_pserver.append(op)
for opt_op in opt_op_on_pserver:
dist_var = None
for key in opt_op.input_names:
if key == "Param":
param_name = opt_op.input(key)[0]
dist_var = self.vars_overview.get_distributed_var_by_origin_and_ep(
param_name, endpoint)
break
for key in opt_op.input_names:
if key in ["Param", "Grad", "LearningRate"]:
continue
origin_var = self.origin_program.global_block().vars[
opt_op.input(key)[0]]
# update accumulator variable shape
new_shape = self._get_optimizer_input_shape(
opt_op.type, key, origin_var.shape,
dist_var.slice.shape)
if new_shape == dist_var.slice.shape:
splited_var = VarStruct(
name=origin_var.name,
shape=new_shape,
dtype=origin_var.dtype,
type=origin_var.type,
lod_level=origin_var.lod_level,
persistable=origin_var.persistable)
self.vars_overview.add_distributed_var(
origin_var=origin_var,
slice_var=splited_var,
is_slice=dist_var.is_slice,
block_id=dist_var.block_id,
offset=dist_var.offset,
vtype="Optimizer",
endpoint=endpoint)
else:
self.vars_overview.add_distributed_var(
origin_var=origin_var,
slice_var=origin_var,
is_slice=False,
block_id=0,
offset=0,
vtype="Optimizer",
endpoint=endpoint)
for ep in self.pserver_endpoints:
_get_distributed_optimizer_var(ep)
def _update_dist_lookup_table_vars(self, param_list, grad_list,
params_grads):
# TODO(wuyi): find a way to put dist lookup table stuff all together.
# update self.table_param_grad and self.trainer_side_table_grad_list
program = self.origin_program
if self.has_distributed_lookup_table:
param_list = [
param for param in param_list if param.name != self.table_name
]
grad_list = [
grad for grad in grad_list
if grad.name != grad_var_name(self.table_name)
]
self.table_param_grad = [
param_grad for param_grad in params_grads
if param_grad[0].name == self.table_name
][0]
table_grad_var = self.table_param_grad[1]
if self.sync_mode:
self.trainer_side_table_grad_list = [
program.global_block().create_var(
name="%s.trainer_%d.pserver_%d" %
(table_grad_var.name, self.trainer_id, index),
type=table_grad_var.type,
shape=table_grad_var.shape,
dtype=table_grad_var.dtype)
for index in range(len(self.pserver_endpoints))
]
else:
self.trainer_side_table_grad_list = [
program.global_block().create_var(
name="%s.pserver_%d" % (table_grad_var.name, index),
type=table_grad_var.type,
shape=table_grad_var.shape,
dtype=table_grad_var.dtype)
for index in range(len(self.pserver_endpoints))
]
return param_list, grad_list
def _init_splited_vars(self):
# update these mappings for further transpile:
# 1. param_var_mapping: param var name -> [splited params vars]
# 2. grad_var_mapping: grad var name -> [splited grads vars]
# 3. grad_param_mapping: grad.blockx -> param.blockx
# 4. param_grad_ep_mapping: ep -> {"params": [], "grads": []}
param_list = []
grad_list = []
param_grad_set = set()
for p, g in self.params_grads:
# skip parameter marked not trainable
if type(p) == Parameter and p.trainable == False:
continue
if p.name not in param_grad_set:
param_list.append(p)
param_grad_set.add(p.name)
if g.name not in param_grad_set:
grad_list.append(g)
param_grad_set.add(g.name)
param_list, grad_list = self._update_dist_lookup_table_vars(
param_list, grad_list, self.params_grads)
if self.config.slice_var_up:
# when we slice var up into blocks, we will slice the var according to
# pserver services' count. A pserver may have two or more listening ports.
grad_blocks = slice_variable(grad_list,
len(self.pserver_endpoints),
self.config.min_block_size)
param_blocks = slice_variable(param_list,
len(self.pserver_endpoints),
self.config.min_block_size)
else:
# when we do NOT slice vars up into blocks, we always keep each param
# and grad as a single block.
grad_blocks = slice_variable(grad_list, 1,
self.config.min_block_size)
param_blocks = slice_variable(param_list, 1,
self.config.min_block_size)
assert (len(grad_blocks) == len(param_blocks))
# origin_param_name -> [splited_param_vars]
self.param_var_mapping = self._create_vars_from_blocklist(
self.origin_program, param_blocks)
for orig_name, splited_vars in self.param_var_mapping.items():
orig_var = self.origin_program.global_block().var(orig_name)
for splited_var in splited_vars:
is_slice, block_id, offset = self._get_slice_var_info(
splited_var)
self.vars_overview.add_distributed_var(
origin_var=orig_var,
slice_var=splited_var,
block_id=block_id,
offset=offset,
is_slice=is_slice,
vtype="Param")
# origin_grad_name -> [splited_grad_vars]
self.grad_var_mapping = self._create_vars_from_blocklist(
self.origin_program,
grad_blocks,
add_trainer_suffix=self.trainer_num > 1)
# dict(grad_splited_var -> param_splited_var)
self.grad_param_mapping = collections.OrderedDict()
for g, p in zip(grad_blocks, param_blocks):
g_name, g_bid, _ = g.split(":")
p_name, p_bid, _ = p.split(":")
self.grad_param_mapping[self.grad_var_mapping[g_name][int(g_bid)]] = \
self.param_var_mapping[p_name][int(p_bid)]
# create mapping of endpoint -> split var to create pserver side program
self.param_grad_ep_mapping = collections.OrderedDict()
[
self.param_grad_ep_mapping.update({
ep: {
"params": [],
"grads": []
}
}) for ep in self.pserver_endpoints
]
# transpiler functions for distributed lookup_table
def _replace_lookup_table_op_with_prefetch(self, program,
pserver_endpoints):
# 1. replace lookup_table_op with split_ids_op -> prefetch_op -> sum_op
self.all_in_ids_vars = []
self.all_prefetch_input_vars = []
self.all_prefetch_output_vars = []
self.all_out_emb_vars = []
lookup_table_op_index = -1
continue_search_lookup_table_op = True
while continue_search_lookup_table_op:
continue_search_lookup_table_op = False
all_ops = program.global_block().ops
for op in all_ops:
if op.type == LOOKUP_TABLE_TYPE and self.table_name == op.input(
"W")[0]:
if not op.attr('is_distributed'):
raise RuntimeError(
"lookup_table_op that lookup an distributed embedding table"
"should set is_distributed to true")
continue_search_lookup_table_op = True
lookup_table_op_index = lookup_table_op_index if lookup_table_op_index != -1 else list(
all_ops).index(op)
ids_name = op.input("Ids")
out_name = op.output("Out")
ids_var = program.global_block().vars[ids_name[0]]
self.all_in_ids_vars.append(ids_var)
out_var = program.global_block().vars[out_name[0]]
self.all_out_emb_vars.append(out_var)
# delete lookup_table_op
delete_ops(program.global_block(), [op])
# break for loop
break
for index in range(len(self.pserver_endpoints)):
in_var = program.global_block().create_var(
name=str("prefetch_compress_in_tmp_" + str(index)),
type=self.all_in_ids_vars[0].type,
shape=self.all_in_ids_vars[0].shape,
dtype=self.all_in_ids_vars[0].dtype)
self.all_prefetch_input_vars.append(in_var)
out_var = program.global_block().create_var(
name=str("prefetch_compress_out_tmp_" + str(index)),
type=self.all_out_emb_vars[0].type,
shape=self.all_out_emb_vars[0].shape,
dtype=self.all_out_emb_vars[0].dtype)
self.all_prefetch_output_vars.append(out_var)
# insert split_ids_op
program.global_block()._insert_op(
index=lookup_table_op_index,
type="split_ids",
inputs={'Ids': self.all_in_ids_vars},
outputs={"Out": self.all_prefetch_input_vars})
# insert prefetch_op
program.global_block()._insert_op(
index=lookup_table_op_index + 1,
type="prefetch",
inputs={'X': self.all_prefetch_input_vars},
outputs={"Out": self.all_prefetch_output_vars},
attrs={
"epmap": pserver_endpoints,
# FIXME(qiao) temporarily disable this config because prefetch
# is not act as other rpc op, it's more like a forward op
# RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
})
# insert concat_op
program.global_block()._insert_op(
index=lookup_table_op_index + 2,
type="merge_ids",
inputs={
'Ids': self.all_in_ids_vars,
'Rows': self.all_prefetch_input_vars,
'X': self.all_prefetch_output_vars
},
outputs={"Out": self.all_out_emb_vars})
def _split_table_grad_and_add_send_vars(self, program, pserver_endpoints):
# 2. add split_ids_op and send_op to send gradient to pservers
# there should only be one table_name
all_ops = program.global_block().ops
table_grad_name = grad_var_name(self.table_name)
for op in all_ops:
if table_grad_name in op.output_arg_names:
op_index = list(all_ops).index(op)
# insert split_ids_op
program.global_block()._insert_op(
index=op_index + 1,
type="split_ids",
inputs={
'Ids': [program.global_block().vars[table_grad_name]]
},
outputs={"Out": self.trainer_side_table_grad_list},
attrs={RPC_OP_ROLE_ATTR_NAME: DIST_OP_ROLE_ATTR_VALUE})
program.global_block()._insert_op(
index=op_index + 2,
type="send",
inputs={'X': self.trainer_side_table_grad_list},
outputs={
'Out':
[self.grad_name_to_send_dummy_out[self.table_name]]
if self.sync_mode else []
},
attrs={
"sync_mode": not self.sync_mode,
"epmap": pserver_endpoints,
"trainer_id": self.trainer_id,
RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE,
OP_ROLE_VAR_ATTR_NAME: [
self.grad_name_to_param_name[table_grad_name],
table_grad_name
]
})
break
def _create_prefetch_block(self, pserver_index, pserver_program,
optimize_block):
# STEP: create prefetch block
table_var = pserver_program.global_block().vars[self.table_name]
prefetch_var_name_to_block_id = []
prefetch_block = pserver_program._create_block(optimize_block.idx)
trainer_ids = self.all_prefetch_input_vars[pserver_index]
pserver_ids = pserver_program.global_block().create_var(
name=trainer_ids.name,
type=trainer_ids.type,
shape=trainer_ids.shape,
dtype=trainer_ids.dtype)
trainer_out = self.all_prefetch_output_vars[pserver_index]
pserver_out = pserver_program.global_block().create_var(
name=trainer_out.name,
type=trainer_out.type,
shape=trainer_out.shape,
dtype=trainer_out.dtype)
prefetch_block.append_op(
type="lookup_sparse_table",
inputs={'Ids': pserver_ids,
"W": table_var},
outputs={"Out": pserver_out},
attrs={
"is_sparse": True, # has no effect on lookup_table op
"is_distributed": True,
"padding_idx": -1
})
prefetch_var_name_to_block_id.append(trainer_ids.name + ":" + str(
prefetch_block.idx))
return prefetch_var_name_to_block_id
def _create_table_optimize_block(self, pserver_index, pserver_program,
pre_block_idx, grad_to_block_id):
# STEP: create table optimize block
table_opt_block = pserver_program._create_block(pre_block_idx)
# create table param and grad var in pserver program
# create table optimize block in pserver program
table_opt_op = [
op for op in self.optimize_ops
if 'Param' in op.input_names and op.input("Param")[0] ==
self.table_name
][0]
origin_param_var = self.origin_program.global_block().vars[
self.table_name]
zero_dim = int(
math.ceil(origin_param_var.shape[0] / float(
len(self.pserver_endpoints))))
table_shape = list(origin_param_var.shape)
table_shape[0] = zero_dim
param_var = pserver_program.global_block().create_var(
name=origin_param_var.name,
shape=table_shape,
dtype=origin_param_var.dtype,
type=core.VarDesc.VarType.SELECTED_ROWS,
persistable=True)
# parameter must be selected rows
param_var.desc.set_type(core.VarDesc.VarType.SELECTED_ROWS)
grad_var = pserver_program.global_block()._clone_variable(
self.origin_program.global_block().vars[grad_var_name(
self.table_name)])
lr_var = pserver_program.global_block()._clone_variable(
self.origin_program.global_block().vars[table_opt_op.input(
"LearningRate")[0]])
if self.sync_mode:
# create grad vars in pserver program
table_grad_var = self.table_param_grad[1]
pserver_side_table_grad_list = [
pserver_program.global_block().create_var(
name="%s.trainer_%d.pserver_%d" %
(table_grad_var.name, index, pserver_index),
type=table_grad_var.type,
shape=table_grad_var.shape,
dtype=table_grad_var.dtype)
for index in range(self.trainer_num)
]
# append sum op for pserver_side_table_grad_list
table_opt_block.append_op(
type="sum",
inputs={"X": pserver_side_table_grad_list},
outputs={"Out": [grad_var]},
attrs={"use_mkldnn": False})
else:
# in async mode, the table gradient also needs to be splited across the parameter servers
origin_grad_name = grad_var.name
splited_grad_name = self.trainer_side_table_grad_list[
pserver_index].name
if not splited_grad_name.startswith(origin_grad_name):
raise ValueError("origin_grad_var: " + splited_grad_name +
" grad_var:" + grad_var.name)
grad_var = pserver_program.global_block()._rename_var(
origin_grad_name, splited_grad_name)
inputs = {
"Param": [param_var],
"Grad": [grad_var],
"LearningRate": [lr_var]
}
outputs = {"ParamOut": [param_var]}
# only sgd is supported now
logging.warning(
"distributed lookup table only supports the sgd optimizer; changing its optimizer to sgd instead of "
+ table_opt_op.type)
table_opt_block.append_op(type="sgd", inputs=inputs, outputs=outputs)
# add the table parameter gradient and its block id to grad_to_block_id
grad_to_block_id.append(grad_var.name + ":" + str(table_opt_block.idx))
return table_opt_block
def _create_checkpoint_save_block(self, pserver_program, pre_block_idx):
"""
create a new block to handle save checkpoint.
"""
pserver_program.global_block().create_var(
name="kLookupTablePath",
persistable=True,
type=core.VarDesc.VarType.RAW)
checkpoint_save_block = pserver_program._create_block(pre_block_idx)
# this 'file_path' is not used when saving the lookup table variable
checkpoint_save_block.append_op(
type='save',
inputs={'X': [self.table_name]},
outputs={},
attrs={'file_path': "none"})
return checkpoint_save_block.idx
def _create_vars_from_blocklist(self,
program,
block_list,
add_trainer_suffix=False):
"""
Create vars for each split.
NOTE: only grads need to be named for different trainers, use
add_trainer_suffix to rename the grad vars.
Args:
program (ProgramDesc): ProgramDesc to which the gradients belong.
block_list (list[(varname, block_id, block_size)]): List of gradient blocks.
add_trainer_suffix (Bool): Add trainer suffix to new variable's name if set True.
Returns:
var_mapping (collections.OrderedDict(varname->[new_varname_variable])): A dict mapping
from original var name to each var split.
"""
# varname->[(block_id, current_block_size)]
block_map = collections.OrderedDict()
var_mapping = collections.OrderedDict()
for block_str in block_list:
varname, offset, size = block_str.split(":")
if varname not in block_map:
block_map[varname] = []
block_map[varname].append((int(offset), int(size)))
for varname, splited in six.iteritems(block_map):
orig_var = program.global_block().var(varname)
if len(splited) == 1:
if self.sync_mode and add_trainer_suffix:
new_var_name = "%s.trainer_%d" % \
(orig_var.name, self.trainer_id)
program.global_block()._rename_var(varname, new_var_name)
var_mapping[varname] = \
[program.global_block().var(new_var_name)]
else:
var_mapping[varname] = \
[program.global_block().var(orig_var.name)]
continue
var_mapping[varname] = []
orig_shape = orig_var.shape
orig_dim1_flatten = 1
if len(orig_shape) >= 2:
orig_dim1_flatten = reduce(lambda x, y: x * y, orig_shape[1:])
for i, block in enumerate(splited):
size = block[1]
rows = size // orig_dim1_flatten
splited_shape = [rows]
if len(orig_shape) >= 2:
splited_shape.extend(orig_shape[1:])
new_var_name = ""
if self.sync_mode and add_trainer_suffix:
new_var_name = "%s.block%d.trainer_%d" % \
(varname, i, self.trainer_id)
else:
new_var_name = "%s.block%d" % \
(varname, i)
var = program.global_block().create_var(
name=new_var_name,
persistable=False,
dtype=orig_var.dtype,
type=orig_var.type,
shape=splited_shape)  # flattened splited var
var_mapping[varname].append(var)
program.global_block()._sync_with_cpp()
return var_mapping
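# Illustrative note (added for explanation; names are hypothetical): with two
# blocks, sync mode and add_trainer_suffix=True, a gradient "fc_0.w_0@GRAD"
# maps to ["fc_0.w_0@GRAD.block0.trainer_0", "fc_0.w_0@GRAD.block1.trainer_0"]
# for trainer 0, while an unsplit gradient only gains the ".trainer_0" suffix.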
def _clone_var(self, block, var, persistable=True):
return block.create_var(
name=var.name,
shape=var.shape,
dtype=var.dtype,
type=var.type,
lod_level=var.lod_level,
persistable=persistable)
@staticmethod
def _get_splited_var_sections(splited_vars):
height_sections = []
for v in splited_vars:
height_sections.append(v.shape[0])
return height_sections
def _insert_split_op(self, program, orig_var, index, splited_vars):
height_sections = self._get_splited_var_sections(splited_vars)
if orig_var.type == core.VarDesc.VarType.SELECTED_ROWS:
sparse_param_name = self.grad_name_to_param_name[orig_var.name]
if self._is_input_of_remote_sparse_update_op(sparse_param_name):
self.sparse_param_to_height_sections[
sparse_param_name] = height_sections
program.global_block()._insert_op(
index=index + 1,
type="split_selected_rows",
inputs={"X": orig_var},
outputs={"Out": splited_vars},
attrs={
"height_sections": height_sections,
RPC_OP_ROLE_ATTR_NAME: DIST_OP_ROLE_ATTR_VALUE
})
elif orig_var.type == core.VarDesc.VarType.LOD_TENSOR:
program.global_block()._insert_op(
index=index + 1,
type="split_byref",
inputs={"X": orig_var},
outputs={"Out": splited_vars},
attrs={
"sections": height_sections,
RPC_OP_ROLE_ATTR_NAME: DIST_OP_ROLE_ATTR_VALUE
})
else:
AssertionError("Variable type should be in set "
"[LOD_TENSOR, SELECTED_ROWS]")
def _get_optimizer_input_shape(self, op_type, varkey, orig_shape,
param_shape):
"""
Returns the shape for optimizer inputs that need to be reshaped when
Param and Grad is split to multiple servers.
"""
# HACK(typhoonzero): Should use functions of corresponding optimizer in
# optimizer.py to get the shape, do not bind this in the transpiler.
if op_type == "adam":
if varkey in ["Moment1", "Moment2"]:
return param_shape
elif op_type == "adagrad":
if varkey == "Moment":
return param_shape
elif op_type == "adamax":
if varkey in ["Moment", "InfNorm"]:
return param_shape
elif op_type in ["momentum", "lars_momentum"]:
if varkey == "Velocity":
return param_shape
elif op_type == "rmsprop":
if varkey in ["Moment", "MeanSquare"]:
return param_shape
elif op_type == "decayed_adagrad":
if varkey == "Moment":
return param_shape
elif op_type == "ftrl":
if varkey in ["SquaredAccumulator", "LinearAccumulator"]:
return param_shape
elif op_type == "sgd":
pass
else:
raise ValueError(
"Not supported optimizer for distributed training: %s" %
op_type)
return orig_shape
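# Illustrative note (added for explanation; shapes are hypothetical): for an
# "adam" op whose (1000, 10) parameter was split into a (500, 10) block, the
# "Moment1"/"Moment2" accumulators are reshaped to (500, 10), while inputs not
# listed above (e.g. "Beta1Pow") keep their original shape.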
def _get_varname_parts(self, varname):
# returns origin, blockid, trainerid
orig_var_name = ""
trainer_part = ""
block_part = ""
trainer_idx = varname.find(".trainer_")
if trainer_idx >= 0:
trainer_part = varname[trainer_idx + 1:]
else:
trainer_idx = len(varname)
block_index = varname.find(".block")
if block_index >= 0:
block_part = varname[block_index + 1:trainer_idx]
else:
block_index = len(varname)
orig_var_name = varname[0:min(block_index, trainer_idx)]
return orig_var_name, block_part, trainer_part
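# Illustrative note (added for explanation; names are hypothetical):
#   _get_varname_parts("fc_0.w_0@GRAD.block0.trainer_1")
#       -> ("fc_0.w_0@GRAD", "block0", "trainer_1")
#   _get_varname_parts("fc_0.w_0") -> ("fc_0.w_0", "", "")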
def _orig_varname(self, varname):
orig, _, _ = self._get_varname_parts(varname)
return orig
def _append_pserver_grad_merge_ops(self, optimize_block,
grad_varname_for_block, endpoint,
grad_to_block_id, origin_program):
program = optimize_block.program
pserver_block = program.global_block()
grad_block = None
for g in self.param_grad_ep_mapping[endpoint]["grads"]:
if self._orig_varname(g.name) == \
self._orig_varname(grad_varname_for_block):
grad_block = g
break
if not grad_block:
# do not append this op if current endpoint
# is not dealing with this grad block
return None
orig_varname, block_name, trainer_name = self._get_varname_parts(
grad_block.name)
if block_name:
merged_var_name = '.'.join([orig_varname, block_name])
else:
merged_var_name = orig_varname
merged_var = pserver_block.vars[merged_var_name]
grad_to_block_id.append(merged_var.name + ":" + str(optimize_block.idx))
if self.sync_mode and self.trainer_num > 1:
vars2merge = []
for i in range(self.trainer_num):
per_trainer_name = "%s.trainer_%d" % \
(merged_var_name, i)
vars2merge.append(pserver_block.vars[per_trainer_name])
optimize_block.append_op(
type="sum",
inputs={"X": vars2merge},
outputs={"Out": merged_var},
attrs={"use_mkldnn": False})
optimize_block.append_op(
type="scale",
inputs={"X": merged_var},
outputs={"Out": merged_var},
attrs={"scale": 1.0 / float(self.trainer_num)})
return merged_var
def _append_dc_asgd_ops(self, block, param_var, grad_var):
# NOTE: cannot use syntactic sugar here; the ops must be put in the specific block
local_param_bak = block.create_var(
name="%s.local_bak" % param_var.name,
shape=param_var.shape,
type=param_var.type,
dtype=param_var.dtype,
persistable=False)
# trainer_id_var is block local
trainer_id_var = block.create_var(
name="@TRAINER_ID@",
type=core.VarDesc.VarType.LOD_TENSOR,
dtype=core.VarDesc.VarType.INT64,
shape=[1],
persistable=False)
# ref_inputs = [x[1] for x in self.param_bak_list]
ref_inputs = []
for p, p_bak in self.param_bak_list:
if p.name == param_var.name:
ref_inputs.append(p_bak)
block.append_op(
type="ref_by_trainer_id",
inputs={"X": ref_inputs,
"TrainerId": trainer_id_var},
outputs={"Out": local_param_bak})
def __create_temp_var__():
return block.create_var(
name=unique_name.generate("tmp_dc_output"),
shape=param_var.shape,
type=param_var.type,
dtype=param_var.dtype,
persistable=False)
o1 = __create_temp_var__()
block.append_op(
type="elementwise_sub",
inputs={"X": param_var,
"Y": local_param_bak},
outputs={"Out": o1})
o2 = __create_temp_var__()
block.append_op(
type="elementwise_mul",
inputs={"X": o1,
"Y": grad_var},
outputs={"Out": o2})
o3 = __create_temp_var__()
block.append_op(
type="elementwise_mul",
inputs={"X": o2,
"Y": grad_var},
outputs={"Out": o3})
# TODO(typhoonzero): append scale
o4 = __create_temp_var__()
block.append_op(
type="elementwise_add",
inputs={"X": grad_var,
"Y": o3},
outputs={"Out": o4})
return o4
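# Illustrative note (added for explanation): the ops above compute the
# delay-compensated gradient of DC-ASGD element-wise as
#     g_dc = g + g * g * (param - param_bak)
# the scale by the compensation coefficient lambda is still the TODO above,
# so no extra scaling is applied yet.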
def _append_pserver_ops(self, optimize_block, opt_op, endpoint,
grad_to_block_id, origin_program, merged_var,
sparse_grad_to_param):
program = optimize_block.program
pserver_block = program.global_block()
new_inputs = collections.OrderedDict()
def _get_param_block(opt_op):
# param is already created on global program
param_block = None
for p in self.param_grad_ep_mapping[endpoint]["params"]:
if same_or_split_var(p.name, opt_op.input("Param")[0]):
param_block = p
break
return param_block
if self.config.enable_dc_asgd:
param_var = _get_param_block(opt_op)
dc = self._append_dc_asgd_ops(optimize_block, param_var, merged_var)
for key in opt_op.input_names:
if key == "Grad":
if self.config.enable_dc_asgd:
new_inputs[key] = dc
else:
# Note!! This is for l2decay on a sparse gradient: it creates a new tensor for the
# decayed gradient instead of modifying the origin one in place
origin_grad_name = opt_op.input(key)[0]
if core.kNewGradSuffix(
) in origin_grad_name and pserver_block.has_var(
origin_grad_name):
new_grad = pserver_block.var(origin_grad_name)
new_inputs[key] = new_grad
else:
new_inputs[key] = merged_var
elif key == "Param":
param_block = _get_param_block(opt_op)
if not param_block:
return
tmpvar = pserver_block.create_var(
name=param_block.name,
persistable=True,
dtype=param_block.dtype,
shape=param_block.shape)
new_inputs[key] = tmpvar
elif key == "LearningRate":
                # the learning rate variable has already been created by a non-optimize op,
                # so don't create it again.
lr_varname = opt_op.input(key)[0]
if lr_varname in pserver_block.vars:
new_inputs[key] = pserver_block.vars[opt_op.input(key)[0]]
else:
origin_var = origin_program.global_block().vars[lr_varname]
tmpvar = pserver_block.create_var(
name=origin_var.name,
persistable=origin_var.persistable,
dtype=origin_var.dtype,
shape=origin_var.shape)
new_inputs[key] = tmpvar
for key in opt_op.input_names:
new_shape = None
if key in ["Param", "Grad", "LearningRate"]:
continue
var = self.origin_program.global_block().vars[opt_op.input(key)[0]]
param_var = new_inputs["Param"]
# update accumulator variable shape
new_shape = self._get_optimizer_input_shape(
opt_op.type, key, var.shape, param_var.shape)
tmpvar = pserver_block.create_var(
name=var.name,
persistable=var.persistable,
dtype=var.dtype,
shape=new_shape)
new_inputs[key] = tmpvar
# change output's ParamOut variable
outputs = self._get_output_map_from_op(
self.origin_program.global_block().vars, opt_op)
outputs["ParamOut"] = new_inputs["Param"]
optimize_block.append_op(
type=opt_op.type,
inputs=new_inputs,
outputs=outputs,
attrs=opt_op.all_attrs())
# record sparse grad to param name
if new_inputs["Grad"].type == core.VarDesc.VarType.SELECTED_ROWS:
sparse_grad_to_param.append(
str(new_inputs["Grad"].name) + ":" + str(new_inputs["Param"]
.name))
def _get_pserver_grad_param_var(self, var, var_dict):
"""
        Return the pserver-side grad/param variable, or return None
        if the variable is not a grad/param, e.g.
            a@GRAD -> a@GRAD.block0
            a@GRAD -> a@GRAD (a is not split)
            fc_0.w_0 -> fc_0.w_0.block_0
            fc_0.w_0 -> fc_0.w_0 (weight is not split)
_generated_var_123 -> None
"""
grad_block = None
for _, g in six.iteritems(var_dict):
if self._orig_varname(g.name) == self._orig_varname(var.name):
# skip per trainer vars
if g.name.find(".trainer_") == -1:
                    # only params or grads have split blocks
if self._orig_varname(g.name) in self.grad_name_to_param_name or \
self._orig_varname(g.name) in self.param_name_to_grad_name:
grad_block = g
break
return grad_block
def _clone_lr_op(self, program, block, op):
inputs = self._get_input_map_from_op(
self.origin_program.global_block().vars, op)
for key, varlist in six.iteritems(inputs):
if not isinstance(varlist, list):
varlist = [varlist]
for var in varlist:
if var not in program.global_block().vars:
block._clone_variable(var)
outputs = self._get_output_map_from_op(
self.origin_program.global_block().vars, op)
for key, varlist in six.iteritems(outputs):
if not isinstance(varlist, list):
varlist = [varlist]
for var in varlist:
if var not in program.global_block().vars:
block._clone_variable(var)
return block.append_op(
type=op.type, inputs=inputs, outputs=outputs, attrs=op.all_attrs())
def _append_pserver_non_opt_ops(self, optimize_block, opt_op):
program = optimize_block.program
# Append the ops for parameters that do not need to be optimized/updated
inputs = self._get_input_map_from_op(
self.origin_program.global_block().vars, opt_op)
for key, varlist in six.iteritems(inputs):
if not isinstance(varlist, list):
varlist = [varlist]
for i in range(len(varlist)):
var = varlist[i]
                # for ops like clipping and weight decay, get the split var (xxx.block0)
                # for inputs/outputs
grad_block = self._get_pserver_grad_param_var(
var, program.global_block().vars)
if grad_block:
varlist[i] = grad_block
elif var.name not in program.global_block().vars:
tmpvar = program.global_block()._clone_variable(var)
varlist[i] = tmpvar
else:
varlist[i] = program.global_block().vars[var.name]
inputs[key] = varlist
outputs = self._get_output_map_from_op(
self.origin_program.global_block().vars, opt_op)
for key, varlist in six.iteritems(outputs):
if not isinstance(varlist, list):
varlist = [varlist]
for i in range(len(varlist)):
var = varlist[i]
grad_block = self._get_pserver_grad_param_var(
var, program.global_block().vars)
if grad_block:
varlist[i] = grad_block
elif var.name not in program.global_block().vars:
tmpvar = program.global_block()._clone_variable(var)
varlist[i] = tmpvar
else:
varlist[i] = program.global_block().vars[var.name]
outputs[key] = varlist
return optimize_block.append_op(
type=opt_op.type,
inputs=inputs,
outputs=outputs,
attrs=opt_op.all_attrs())
def _is_op_connected(self, op1, op2):
        # If one op's input is another op's output, or one op's output is
        # another op's input, the two operators are considered connected.
if set(op1.desc.output_arg_names()) & set(op2.desc.input_arg_names()) or \
set(op1.desc.input_arg_names()) & set(op2.desc.output_arg_names()):
return True
return False
def _create_ufind(self, optimize_ops):
        # Build a union-find structure over the optimize ops; a standalone
        # sketch of this grouping appears at the end of this file.
ufind = UnionFind(optimize_ops)
for i in range(len(optimize_ops)):
for j in range(i, len(optimize_ops)):
op1 = optimize_ops[i]
op2 = optimize_ops[j]
if self._is_op_connected(op1, op2):
ufind.union(op1, op2)
return ufind
def _is_optimizer_op(self, op):
if "Param" in op.input_names and \
"LearningRate" in op.input_names:
return True
return False
def _is_opt_op_on_pserver(self, endpoint, op):
param_names = [
p.name for p in self.param_grad_ep_mapping[endpoint]["params"]
]
if op.input("Param")[0] in param_names:
return True
else:
for n in param_names:
param = op.input("Param")[0]
if same_or_split_var(n, param) and n != param:
return True
return False
def _get_input_map_from_op(self, varmap, op):
"""Returns a dict from op input name to the vars in varmap."""
iomap = collections.OrderedDict()
for key in op.input_names:
vars = []
for varname in op.input(key):
vars.append(varmap[varname])
if len(vars) == 1:
iomap[key] = vars[0]
else:
iomap[key] = vars
return iomap
def _get_output_map_from_op(self, varmap, op):
"""Returns a dict from op output name to the vars in varmap."""
iomap = collections.OrderedDict()
for key in op.output_names:
vars = []
for varname in op.output(key):
vars.append(varmap[varname])
if len(vars) == 1:
iomap[key] = vars[0]
else:
iomap[key] = vars
return iomap
def _get_lr_ops(self):
lr_ops = []
block = self.origin_program.global_block()
for op in block.ops:
role_id = int(op.attr(RPC_OP_ROLE_ATTR_NAME))
if role_id == int(LR_SCHED_OP_ROLE_ATTR_VALUE) or \
role_id == int(LR_SCHED_OP_ROLE_ATTR_VALUE) | \
int(OPT_OP_ROLE_ATTR_VALUE):
lr_ops.append(op)
log("append lr op: ", op.type)
return lr_ops
def _get_lr_ops_deprecated(self):
lr_ops = []
# find learning rate variables by optimize op
lr_vars = set()
for op in self.optimize_ops:
if self._is_optimizer_op(op):
lr_vars.add(op.input("LearningRate")[0])
find_ops = []
        # find ops whose output is an lr var
block = self.origin_program.global_block()
for op in block.ops:
if set(op.output_arg_names) & lr_vars:
find_ops.append(op)
        # build a union-find structure over the ops in default_main_program
ufind = UnionFind(block.ops)
for op1 in block.ops:
for op2 in block.ops:
                # NOTE: skip all optimize ops; they are connected to both
                # forward/backward ops and lr ops, and we only need the lr ops.
if op1 != op2 and self._is_op_connected(op1, op2) and \
not self._is_optimizer_op(op1) and not self._is_optimizer_op(op2):
ufind.union(op1, op2)
        # find all ops related to the lr vars
for op1 in block.ops:
for op2 in find_ops:
if ufind.is_connected(op1, op2):
lr_ops.append(op1)
                    # append each op only once
break
return lr_ops
def _is_opt_role_op(self, op):
        # NOTE: rely on the op role attribute to determine whether this op
        # is an optimize op
op_maker = core.op_proto_and_checker_maker
optimize_role = core.op_proto_and_checker_maker.OpRole.Optimize
if op_maker.kOpRoleAttrName() in op.attr_names and \
int(op.all_attrs()[op_maker.kOpRoleAttrName()]) == int(optimize_role):
return True
return False
def _get_optimize_pass(self):
"""
Get optimizer operators, parameters and gradients from origin_program
Returns:
opt_ops (list): optimize operators.
            params_grads (list): a list of [parameter, gradient] variable pairs.
"""
block = self.origin_program.global_block()
opt_ops = []
params_grads = []
# tmp set to dedup
optimize_params = set()
origin_var_dict = self.origin_program.global_block().vars
for op in block.ops:
if self._is_opt_role_op(op):
opt_ops.append(op)
if op.attr(OP_ROLE_VAR_ATTR_NAME):
param_name = op.attr(OP_ROLE_VAR_ATTR_NAME)[0]
grad_name = op.attr(OP_ROLE_VAR_ATTR_NAME)[1]
                    if param_name not in optimize_params:
optimize_params.add(param_name)
log("adding param_grad pair: ", param_name, grad_name)
params_grads.append([
origin_var_dict[param_name],
origin_var_dict[grad_name]
])
else:
pass
return opt_ops, params_grads
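# A minimal, standalone sketch of the grouping technique used by
# _is_op_connected() and _create_ufind() above: two ops are considered
# connected when one op's outputs overlap the other op's inputs, and connected
# ops are clustered with a union-find structure. ToyOp, toy_connected and
# _ToyUnionFind are illustrative stand-ins, not Paddle APIs or the project's
# UnionFind class; the guard keeps the sketch from running on import.
if __name__ == "__main__":
    class ToyOp(object):
        def __init__(self, name, inputs, outputs):
            self.name = name
            self.inputs = set(inputs)
            self.outputs = set(outputs)
    def toy_connected(op1, op2):
        # mirrors _is_op_connected: any overlap between inputs and outputs
        return bool(op1.outputs & op2.inputs) or bool(op1.inputs & op2.outputs)
    class _ToyUnionFind(object):
        def __init__(self, items):
            self.parent = dict((id(x), id(x)) for x in items)
        def find(self, x):
            root = id(x)
            while self.parent[root] != root:
                root = self.parent[root]
            return root
        def union(self, a, b):
            ra, rb = self.find(a), self.find(b)
            if ra != rb:
                self.parent[rb] = ra
    toy_ops = [
        ToyOp("sgd_w", ["w", "w@GRAD", "lr"], ["w"]),
        ToyOp("scale_w_grad", ["w@GRAD"], ["w@GRAD"]),
        ToyOp("sgd_b", ["b", "b@GRAD", "lr"], ["b"]),
    ]
    uf = _ToyUnionFind(toy_ops)
    for i in range(len(toy_ops)):
        for j in range(i, len(toy_ops)):
            if toy_connected(toy_ops[i], toy_ops[j]):
                uf.union(toy_ops[i], toy_ops[j])
    groups = {}
    for op in toy_ops:
        groups.setdefault(uf.find(op), []).append(op.name)
    # expected grouping: ['sgd_w', 'scale_w_grad'] together and ['sgd_b'] alone
    print(sorted(groups.values()))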
|
apache-2.0
|
mattpap/sympy-polys
|
sympy/mpmath/tests/test_special.py
|
7
|
2827
|
from sympy.mpmath import *
def test_special():
assert inf == inf
assert inf != -inf
assert -inf == -inf
assert inf != nan
assert nan != nan
assert isnan(nan)
assert --inf == inf
assert abs(inf) == inf
assert abs(-inf) == inf
assert abs(nan) != abs(nan)
assert isnan(inf - inf)
assert isnan(inf + (-inf))
assert isnan(-inf - (-inf))
assert isnan(inf + nan)
assert isnan(-inf + nan)
assert mpf(2) + inf == inf
assert 2 + inf == inf
assert mpf(2) - inf == -inf
assert 2 - inf == -inf
assert inf > 3
assert 3 < inf
assert 3 > -inf
assert -inf < 3
assert inf > mpf(3)
assert mpf(3) < inf
assert mpf(3) > -inf
assert -inf < mpf(3)
assert not (nan < 3)
assert not (nan > 3)
assert isnan(inf * 0)
assert isnan(-inf * 0)
assert inf * 3 == inf
assert inf * -3 == -inf
assert -inf * 3 == -inf
assert -inf * -3 == inf
assert inf * inf == inf
assert -inf * -inf == inf
assert isnan(nan / 3)
assert inf / -3 == -inf
assert inf / 3 == inf
assert 3 / inf == 0
assert -3 / inf == 0
assert 0 / inf == 0
assert isnan(inf / inf)
assert isnan(inf / -inf)
assert isnan(inf / nan)
assert mpf('inf') == mpf('+inf') == inf
assert mpf('-inf') == -inf
assert isnan(mpf('nan'))
assert isinf(inf)
assert isinf(-inf)
assert not isinf(mpf(0))
assert not isinf(nan)
def test_special_powers():
assert inf**3 == inf
assert isnan(inf**0)
assert inf**-3 == 0
assert (-inf)**2 == inf
assert (-inf)**3 == -inf
assert isnan((-inf)**0)
assert (-inf)**-2 == 0
assert (-inf)**-3 == 0
assert isnan(nan**5)
assert isnan(nan**0)
def test_functions_special():
assert exp(inf) == inf
assert exp(-inf) == 0
assert isnan(exp(nan))
assert log(inf) == inf
assert isnan(sin(inf))
assert isnan(sin(nan))
assert atan(inf).ae(pi/2)
assert atan(-inf).ae(-pi/2)
assert isnan(sqrt(nan))
assert sqrt(inf) == inf
def test_convert_special():
float_inf = 1e300 * 1e300
float_ninf = -float_inf
float_nan = float_inf/float_ninf
assert mpf(3) * float_inf == inf
assert mpf(3) * float_ninf == -inf
assert isnan(mpf(3) * float_nan)
assert not (mpf(3) < float_nan)
assert not (mpf(3) > float_nan)
assert not (mpf(3) <= float_nan)
assert not (mpf(3) >= float_nan)
assert float(mpf('1e1000')) == float_inf
assert float(mpf('-1e1000')) == float_ninf
assert float(mpf('1e100000000000000000')) == float_inf
assert float(mpf('-1e100000000000000000')) == float_ninf
assert float(mpf('1e-100000000000000000')) == 0.0
def test_div_bug():
assert isnan(nan/1)
assert isnan(nan/2)
assert inf/2 == inf
assert (-inf)/2 == -inf
|
bsd-3-clause
|
chemelnucfin/tensorflow
|
tensorflow/contrib/bigtable/__init__.py
|
29
|
1329
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Cloud Bigtable Client for TensorFlow.
This contrib package allows TensorFlow to interface directly with Cloud Bigtable
for high-speed data loading.
@@BigtableClient
@@BigtableTable
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.bigtable.python.ops.bigtable_api import BigtableClient
from tensorflow.contrib.bigtable.python.ops.bigtable_api import BigtableTable
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
'BigtableClient',
'BigtableTable',
]
remove_undocumented(__name__, _allowed_symbols)
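# A minimal usage sketch for the two symbols exported above, assuming a
# reachable Cloud Bigtable instance; the project, instance, and table names are
# placeholders, and it assumes BigtableTable's keys_by_prefix_dataset helper
# for building a dataset of row keys.
def _example_bigtable_key_dataset():
  client = BigtableClient(project_id='my-project', instance_id='my-instance')
  table = client.table('my-table')
  # A tf.data.Dataset of row keys sharing the given prefix, usable as the head
  # of an input pipeline.
  return table.keys_by_prefix_dataset('train_')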
|
apache-2.0
|
DasIch/django
|
tests/admin_checks/tests.py
|
118
|
23502
|
from __future__ import unicode_literals
from django import forms
from django.contrib import admin
from django.contrib.admin import AdminSite
from django.contrib.contenttypes.admin import GenericStackedInline
from django.core import checks
from django.test import SimpleTestCase, override_settings
from .models import Album, Book, City, Influence, Song, State, TwoAlbumFKAndAnE
class SongForm(forms.ModelForm):
pass
class ValidFields(admin.ModelAdmin):
form = SongForm
fields = ['title']
class ValidFormFieldsets(admin.ModelAdmin):
def get_form(self, request, obj=None, **kwargs):
class ExtraFieldForm(SongForm):
name = forms.CharField(max_length=50)
return ExtraFieldForm
fieldsets = (
(None, {
'fields': ('name',),
}),
)
class MyAdmin(admin.ModelAdmin):
def check(self, **kwargs):
return ['error!']
@override_settings(
SILENCED_SYSTEM_CHECKS=['fields.W342'], # ForeignKey(unique=True)
INSTALLED_APPS=['django.contrib.auth', 'django.contrib.contenttypes', 'admin_checks']
)
class SystemChecksTestCase(SimpleTestCase):
@override_settings(DEBUG=True)
def test_checks_are_performed(self):
admin.site.register(Song, MyAdmin)
try:
errors = checks.run_checks()
expected = ['error!']
self.assertEqual(errors, expected)
finally:
admin.site.unregister(Song)
admin.sites.system_check_errors = []
@override_settings(DEBUG=True)
def test_custom_adminsite(self):
class CustomAdminSite(admin.AdminSite):
pass
custom_site = CustomAdminSite()
custom_site.register(Song, MyAdmin)
try:
errors = checks.run_checks()
expected = ['error!']
self.assertEqual(errors, expected)
finally:
custom_site.unregister(Song)
admin.sites.system_check_errors = []
def test_field_name_not_in_list_display(self):
class SongAdmin(admin.ModelAdmin):
list_editable = ["original_release"]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'list_editable[0]' refers to 'original_release', "
"which is not contained in 'list_display'.",
hint=None,
obj=SongAdmin,
id='admin.E122',
)
]
self.assertEqual(errors, expected)
def test_readonly_and_editable(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ["original_release"]
list_display = ["pk", "original_release"]
list_editable = ["original_release"]
fieldsets = [
(None, {
"fields": ["title", "original_release"],
}),
]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
("The value of 'list_editable[0]' refers to 'original_release', "
"which is not editable through the admin."),
hint=None,
obj=SongAdmin,
id='admin.E125',
)
]
self.assertEqual(errors, expected)
def test_editable(self):
class SongAdmin(admin.ModelAdmin):
list_display = ["pk", "title"]
list_editable = ["title"]
fieldsets = [
(None, {
"fields": ["title", "original_release"],
}),
]
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_custom_modelforms_with_fields_fieldsets(self):
"""
# Regression test for #8027: custom ModelForms with fields/fieldsets
"""
errors = ValidFields(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_custom_get_form_with_fieldsets(self):
"""
Ensure that the fieldsets checks are skipped when the ModelAdmin.get_form() method
is overridden.
Refs #19445.
"""
errors = ValidFormFieldsets(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_fieldsets_fields_non_tuple(self):
"""
Tests for a tuple/list for the first fieldset's fields.
"""
class NotATupleAdmin(admin.ModelAdmin):
list_display = ["pk", "title"]
list_editable = ["title"]
fieldsets = [
(None, {
"fields": "title" # not a tuple
}),
]
errors = NotATupleAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'fieldsets[0][1]['fields']' must be a list or tuple.",
hint=None,
obj=NotATupleAdmin,
id='admin.E008',
)
]
self.assertEqual(errors, expected)
def test_nonfirst_fieldset(self):
"""
Tests for a tuple/list for the second fieldset's fields.
"""
class NotATupleAdmin(admin.ModelAdmin):
fieldsets = [
(None, {
"fields": ("title",)
}),
('foo', {
"fields": "author" # not a tuple
}),
]
errors = NotATupleAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'fieldsets[1][1]['fields']' must be a list or tuple.",
hint=None,
obj=NotATupleAdmin,
id='admin.E008',
)
]
self.assertEqual(errors, expected)
def test_exclude_values(self):
"""
Tests for basic system checks of 'exclude' option values (#12689)
"""
class ExcludedFields1(admin.ModelAdmin):
exclude = 'foo'
errors = ExcludedFields1(Book, AdminSite()).check()
expected = [
checks.Error(
"The value of 'exclude' must be a list or tuple.",
hint=None,
obj=ExcludedFields1,
id='admin.E014',
)
]
self.assertEqual(errors, expected)
def test_exclude_duplicate_values(self):
class ExcludedFields2(admin.ModelAdmin):
exclude = ('name', 'name')
errors = ExcludedFields2(Book, AdminSite()).check()
expected = [
checks.Error(
"The value of 'exclude' contains duplicate field(s).",
hint=None,
obj=ExcludedFields2,
id='admin.E015',
)
]
self.assertEqual(errors, expected)
def test_exclude_in_inline(self):
class ExcludedFieldsInline(admin.TabularInline):
model = Song
exclude = 'foo'
class ExcludedFieldsAlbumAdmin(admin.ModelAdmin):
model = Album
inlines = [ExcludedFieldsInline]
errors = ExcludedFieldsAlbumAdmin(Album, AdminSite()).check()
expected = [
checks.Error(
"The value of 'exclude' must be a list or tuple.",
hint=None,
obj=ExcludedFieldsInline,
id='admin.E014',
)
]
self.assertEqual(errors, expected)
def test_exclude_inline_model_admin(self):
"""
Regression test for #9932 - exclude in InlineModelAdmin should not
contain the ForeignKey field used in ModelAdmin.model
"""
class SongInline(admin.StackedInline):
model = Song
exclude = ['album']
class AlbumAdmin(admin.ModelAdmin):
model = Album
inlines = [SongInline]
errors = AlbumAdmin(Album, AdminSite()).check()
expected = [
checks.Error(
("Cannot exclude the field 'album', because it is the foreign key "
"to the parent model 'admin_checks.Album'."),
hint=None,
obj=SongInline,
id='admin.E201',
)
]
self.assertEqual(errors, expected)
def test_valid_generic_inline_model_admin(self):
"""
Regression test for #22034 - check that generic inlines don't look for
normal ForeignKey relations.
"""
class InfluenceInline(GenericStackedInline):
model = Influence
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_generic_inline_model_admin_non_generic_model(self):
"""
Ensure that a model without a GenericForeignKey raises problems if it's included
        in a GenericInlineModelAdmin definition.
"""
class BookInline(GenericStackedInline):
model = Book
class SongAdmin(admin.ModelAdmin):
inlines = [BookInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'admin_checks.Book' has no GenericForeignKey.",
hint=None,
obj=BookInline,
id='admin.E301',
)
]
self.assertEqual(errors, expected)
def test_generic_inline_model_admin_bad_ct_field(self):
"A GenericInlineModelAdmin raises problems if the ct_field points to a non-existent field."
class InfluenceInline(GenericStackedInline):
model = Influence
ct_field = 'nonexistent'
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'ct_field' references 'nonexistent', which is not a field on 'admin_checks.Influence'.",
hint=None,
obj=InfluenceInline,
id='admin.E302',
)
]
self.assertEqual(errors, expected)
def test_generic_inline_model_admin_bad_fk_field(self):
"A GenericInlineModelAdmin raises problems if the ct_fk_field points to a non-existent field."
class InfluenceInline(GenericStackedInline):
model = Influence
ct_fk_field = 'nonexistent'
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'ct_fk_field' references 'nonexistent', which is not a field on 'admin_checks.Influence'.",
hint=None,
obj=InfluenceInline,
id='admin.E303',
)
]
self.assertEqual(errors, expected)
def test_generic_inline_model_admin_non_gfk_ct_field(self):
"""
A GenericInlineModelAdmin raises problems if the ct_field points to a
field that isn't part of a GenericForeignKey.
"""
class InfluenceInline(GenericStackedInline):
model = Influence
ct_field = 'name'
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'admin_checks.Influence' has no GenericForeignKey using "
"content type field 'name' and object ID field 'object_id'.",
hint=None,
obj=InfluenceInline,
id='admin.E304',
)
]
self.assertEqual(errors, expected)
def test_generic_inline_model_admin_non_gfk_fk_field(self):
"""
A GenericInlineModelAdmin raises problems if the ct_fk_field points to
a field that isn't part of a GenericForeignKey.
"""
class InfluenceInline(GenericStackedInline):
model = Influence
ct_fk_field = 'name'
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'admin_checks.Influence' has no GenericForeignKey using "
"content type field 'content_type' and object ID field 'name'.",
hint=None,
obj=InfluenceInline,
id='admin.E304',
)
]
self.assertEqual(errors, expected)
def test_app_label_in_admin_checks(self):
"""
Regression test for #15669 - Include app label in admin system check messages
"""
class RawIdNonexistingAdmin(admin.ModelAdmin):
raw_id_fields = ('nonexisting',)
errors = RawIdNonexistingAdmin(Album, AdminSite()).check()
expected = [
checks.Error(
"The value of 'raw_id_fields[0]' refers to 'nonexisting', "
"which is not an attribute of 'admin_checks.Album'.",
hint=None,
obj=RawIdNonexistingAdmin,
id='admin.E002',
)
]
self.assertEqual(errors, expected)
def test_fk_exclusion(self):
"""
Regression test for #11709 - when testing for fk excluding (when exclude is
given) make sure fk_name is honored or things blow up when there is more
than one fk to the parent model.
"""
class TwoAlbumFKAndAnEInline(admin.TabularInline):
model = TwoAlbumFKAndAnE
exclude = ("e",)
fk_name = "album1"
class MyAdmin(admin.ModelAdmin):
inlines = [TwoAlbumFKAndAnEInline]
errors = MyAdmin(Album, AdminSite()).check()
self.assertEqual(errors, [])
def test_inline_self_check(self):
class TwoAlbumFKAndAnEInline(admin.TabularInline):
model = TwoAlbumFKAndAnE
class MyAdmin(admin.ModelAdmin):
inlines = [TwoAlbumFKAndAnEInline]
errors = MyAdmin(Album, AdminSite()).check()
expected = [
checks.Error(
"'admin_checks.TwoAlbumFKAndAnE' has more than one ForeignKey to 'admin_checks.Album'.",
hint=None,
obj=TwoAlbumFKAndAnEInline,
id='admin.E202',
)
]
self.assertEqual(errors, expected)
def test_inline_with_specified(self):
class TwoAlbumFKAndAnEInline(admin.TabularInline):
model = TwoAlbumFKAndAnE
fk_name = "album1"
class MyAdmin(admin.ModelAdmin):
inlines = [TwoAlbumFKAndAnEInline]
errors = MyAdmin(Album, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("title",)
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_on_method(self):
def my_function(obj):
pass
class SongAdmin(admin.ModelAdmin):
readonly_fields = (my_function,)
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_on_modeladmin(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("readonly_method_on_modeladmin",)
def readonly_method_on_modeladmin(self, obj):
pass
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_dynamic_attribute_on_modeladmin(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("dynamic_method",)
def __getattr__(self, item):
if item == "dynamic_method":
def method(obj):
pass
return method
raise AttributeError
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_method_on_model(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("readonly_method_on_model",)
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_nonexistent_field(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("title", "nonexistent")
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
("The value of 'readonly_fields[1]' is not a callable, an attribute "
"of 'SongAdmin', or an attribute of 'admin_checks.Song'."),
hint=None,
obj=SongAdmin,
id='admin.E035',
)
]
self.assertEqual(errors, expected)
def test_nonexistent_field_on_inline(self):
class CityInline(admin.TabularInline):
model = City
readonly_fields = ['i_dont_exist'] # Missing attribute
errors = CityInline(State, AdminSite()).check()
expected = [
checks.Error(
("The value of 'readonly_fields[0]' is not a callable, an attribute "
"of 'CityInline', or an attribute of 'admin_checks.City'."),
hint=None,
obj=CityInline,
id='admin.E035',
)
]
self.assertEqual(errors, expected)
def test_extra(self):
class SongAdmin(admin.ModelAdmin):
def awesome_song(self, instance):
if instance.title == "Born to Run":
return "Best Ever!"
return "Status unknown."
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_lambda(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = (lambda obj: "test",)
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_graceful_m2m_fail(self):
"""
Regression test for #12203/#12237 - Fail more gracefully when a M2M field that
specifies the 'through' option is included in the 'fields' or the 'fieldsets'
ModelAdmin options.
"""
class BookAdmin(admin.ModelAdmin):
fields = ['authors']
errors = BookAdmin(Book, AdminSite()).check()
expected = [
checks.Error(
("The value of 'fields' cannot include the ManyToManyField 'authors', "
"because that field manually specifies a relationship model."),
hint=None,
obj=BookAdmin,
id='admin.E013',
)
]
self.assertEqual(errors, expected)
def test_cannot_include_through(self):
class FieldsetBookAdmin(admin.ModelAdmin):
fieldsets = (
('Header 1', {'fields': ('name',)}),
('Header 2', {'fields': ('authors',)}),
)
errors = FieldsetBookAdmin(Book, AdminSite()).check()
expected = [
checks.Error(
("The value of 'fieldsets[1][1][\"fields\"]' cannot include the ManyToManyField "
"'authors', because that field manually specifies a relationship model."),
hint=None,
obj=FieldsetBookAdmin,
id='admin.E013',
)
]
self.assertEqual(errors, expected)
def test_nested_fields(self):
class NestedFieldsAdmin(admin.ModelAdmin):
fields = ('price', ('name', 'subtitle'))
errors = NestedFieldsAdmin(Book, AdminSite()).check()
self.assertEqual(errors, [])
def test_nested_fieldsets(self):
class NestedFieldsetAdmin(admin.ModelAdmin):
fieldsets = (
('Main', {'fields': ('price', ('name', 'subtitle'))}),
)
errors = NestedFieldsetAdmin(Book, AdminSite()).check()
self.assertEqual(errors, [])
def test_explicit_through_override(self):
"""
Regression test for #12209 -- If the explicitly provided through model
        is specified as a string, the admin should still be able to use
Model.m2m_field.through
"""
class AuthorsInline(admin.TabularInline):
model = Book.authors.through
class BookAdmin(admin.ModelAdmin):
inlines = [AuthorsInline]
errors = BookAdmin(Book, AdminSite()).check()
self.assertEqual(errors, [])
def test_non_model_fields(self):
"""
Regression for ensuring ModelAdmin.fields can contain non-model fields
that broke with r11737
"""
class SongForm(forms.ModelForm):
extra_data = forms.CharField()
class FieldsOnFormOnlyAdmin(admin.ModelAdmin):
form = SongForm
fields = ['title', 'extra_data']
errors = FieldsOnFormOnlyAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_non_model_first_field(self):
"""
        Regression for ensuring ModelAdmin.fields can handle the first element
        being a non-model field (test fix for UnboundLocalError introduced with r16225).
"""
class SongForm(forms.ModelForm):
extra_data = forms.CharField()
class Meta:
model = Song
fields = '__all__'
class FieldsOnFormOnlyAdmin(admin.ModelAdmin):
form = SongForm
fields = ['extra_data', 'title']
errors = FieldsOnFormOnlyAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_check_sublists_for_duplicates(self):
class MyModelAdmin(admin.ModelAdmin):
fields = ['state', ['state']]
errors = MyModelAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'fields' contains duplicate field(s).",
hint=None,
obj=MyModelAdmin,
id='admin.E006'
)
]
self.assertEqual(errors, expected)
def test_check_fieldset_sublists_for_duplicates(self):
class MyModelAdmin(admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['title', 'album', ('title', 'album')]
}),
]
errors = MyModelAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"There are duplicate field(s) in 'fieldsets[0][1]'.",
hint=None,
obj=MyModelAdmin,
id='admin.E012'
)
]
self.assertEqual(errors, expected)
def test_list_filter_works_on_through_field_even_when_apps_not_ready(self):
"""
Ensure list_filter can access reverse fields even when the app registry
is not ready; refs #24146.
"""
class BookAdminWithListFilter(admin.ModelAdmin):
list_filter = ['authorsbooks__featured']
# Temporarily pretending apps are not ready yet. This issue can happen
# if the value of 'list_filter' refers to a 'through__field'.
Book._meta.apps.ready = False
try:
errors = BookAdminWithListFilter(Book, AdminSite()).check()
self.assertEqual(errors, [])
finally:
Book._meta.apps.ready = True
|
bsd-3-clause
|
maurofaccenda/ansible
|
lib/ansible/modules/web_infrastructure/ansible_tower/tower_host.py
|
33
|
5923
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <[email protected]>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_host
version_added: "2.3"
author: "Wayne Witzel III (@wwitzel3)"
short_description: create, update, or destroy Ansible Tower host.
description:
- Create, update, or destroy Ansible Tower hosts. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- The name to use for the host.
required: True
description:
description:
- The description to use for the host.
required: False
default: null
inventory:
description:
- Inventory the host should be made a member of.
required: True
enabled:
description:
- If the host should be enabled.
required: False
default: True
variables:
description:
- Variables to use for the host. Use '@' for a file.
state:
description:
- Desired state of the resource.
required: False
default: "present"
choices: ["present", "absent"]
tower_host:
description:
- URL to your Tower instance.
required: False
default: null
tower_username:
description:
- Username for your Tower instance.
required: False
default: null
tower_password:
description:
- Password for your Tower instance.
required: False
default: null
tower_verify_ssl:
description:
- Dis/allow insecure connections to Tower. If C(no), SSL certificates will not be validated.
This should only be used on personally controlled sites using self-signed certificates.
required: False
default: True
tower_config_file:
description:
- Path to the Tower config file. See notes.
required: False
default: null
requirements:
- "python >= 2.6"
- "ansible-tower-cli >= 3.0.3"
notes:
- If no I(config_file) is provided we will attempt to use the tower-cli library
defaults to find your Tower host information.
- I(config_file) should contain Tower configuration in the following format
host=hostname
username=username
password=password
'''
EXAMPLES = '''
- name: Add tower host
tower_host:
name: localhost
description: "Local Host Group"
inventory: "Local Inventory"
state: present
tower_config_file: "~/tower_cli.cfg"
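# An additional illustrative task with hypothetical connection values: remove
# the same host, authenticating directly instead of via a config file.
- name: Remove tower host
  tower_host:
    name: localhost
    inventory: "Local Inventory"
    state: absent
    tower_host: https://tower.example.com
    tower_username: admin
    tower_password: "{{ tower_admin_password }}"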
'''
try:
import os
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
from ansible.module_utils.ansible_tower import tower_auth_config, tower_check_mode
HAS_TOWER_CLI = True
except ImportError:
HAS_TOWER_CLI = False
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
description = dict(),
inventory = dict(required=True),
enabled = dict(type='bool', default=True),
variables = dict(),
tower_host = dict(),
tower_username = dict(),
tower_password = dict(no_log=True),
tower_verify_ssl = dict(type='bool', default=True),
tower_config_file = dict(type='path'),
state = dict(choices=['present', 'absent'], default='present'),
),
supports_check_mode=True
)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
name = module.params.get('name')
description = module.params.get('description')
inventory = module.params.get('inventory')
enabled = module.params.get('enabled')
state = module.params.get('state')
variables = module.params.get('variables')
if variables:
if variables.startswith('@'):
filename = os.path.expanduser(variables[1:])
variables = module.contents_from_file(filename)
json_output = {'host': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
host = tower_cli.get_resource('host')
try:
inv_res = tower_cli.get_resource('inventory')
inv = inv_res.get(name=inventory)
if state == 'present':
result = host.modify(name=name, inventory=inv['id'], enabled=enabled,
variables=variables, description=description, create_on_missing=True)
json_output['id'] = result['id']
elif state == 'absent':
result = host.delete(name=name, inventory=inv['id'])
except (exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update host, inventory not found: {0}'.format(excinfo), changed=False)
except (exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(msg='Failed to update host: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
|
gpl-3.0
|
jungle90/Openstack-Swift-I-O-throttler
|
test/unit/common/middleware/test_staticweb.py
|
19
|
34527
|
# Copyright (c) 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import simplejson as json
except ImportError:
import json
import json as stdlib_json
import unittest
import mock
from swift.common.swob import Request, Response
from swift.common.middleware import staticweb
meta_map = {
'c1': {'status': 401},
'c2': {},
'c3': {'meta': {'web-index': 'index.html',
'web-listings': 't'}},
'c3b': {'meta': {'web-index': 'index.html',
'web-listings': 't'}},
'c4': {'meta': {'web-index': 'index.html',
'web-error': 'error.html',
'web-listings': 't',
'web-listings-css': 'listing.css',
'web-directory-type': 'text/dir'}},
'c5': {'meta': {'web-index': 'index.html',
'web-error': 'error.html',
'web-listings': 't',
'web-listings-css': 'listing.css'}},
'c6': {'meta': {'web-listings': 't'}},
'c7': {'meta': {'web-listings': 'f'}},
'c8': {'meta': {'web-error': 'error.html',
'web-listings': 't',
'web-listings-css':
'http://localhost/stylesheets/listing.css'}},
'c9': {'meta': {'web-error': 'error.html',
'web-listings': 't',
'web-listings-css':
'/absolute/listing.css'}},
'c10': {'meta': {'web-listings': 't'}},
'c11': {'meta': {'web-index': 'index.html'}},
'c11a': {'meta': {'web-index': 'index.html',
'web-directory-type': 'text/directory'}},
'c12': {'meta': {'web-index': 'index.html',
'web-error': 'error.html'}},
'c13': {'meta': {'web-listings': 'f',
'web-listings-css': 'listing.css'}},
}
def mock_get_container_info(env, app, swift_source='SW'):
container = env['PATH_INFO'].rstrip('/').split('/')[3]
container_info = meta_map[container]
container_info.setdefault('status', 200)
container_info.setdefault('read_acl', '.r:*')
return container_info
class FakeApp(object):
def __init__(self, status_headers_body_iter=None):
self.calls = 0
self.get_c4_called = False
def __call__(self, env, start_response):
self.calls += 1
if env['PATH_INFO'] == '/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1':
return Response(
status='412 Precondition Failed')(env, start_response)
elif env['PATH_INFO'] == '/v1/a':
return Response(status='401 Unauthorized')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c1':
return Response(status='401 Unauthorized')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c2':
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c2/one.txt':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3':
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/index.html':
return Response(status='200 Ok', body='''
<html>
<body>
<h1>Test main index.html file.</h1>
<p>Visit <a href="subdir">subdir</a>.</p>
<p>Don't visit <a href="subdir2/">subdir2</a> because it doesn't really
exist.</p>
<p>Visit <a href="subdir3">subdir3</a>.</p>
<p>Visit <a href="subdir3/subsubdir">subdir3/subsubdir</a>.</p>
</body>
</html>
''')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3b':
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3b/index.html':
resp = Response(status='204 No Content')
resp.app_iter = iter([])
return resp(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdir':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdir/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdir/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir/index.html':
return Response(status='200 Ok', body='index file')(env,
start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdirx/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdirx/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdiry/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdiry/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdirz':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/subdirz/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/unknown':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c3/unknown/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c4':
self.get_c4_called = True
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c4/one.txt':
return Response(
status='200 Ok',
headers={'x-object-meta-test': 'value'},
body='1')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c4/two.txt':
return Response(status='503 Service Unavailable')(env,
start_response)
elif env['PATH_INFO'] == '/v1/a/c4/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c4/subdir/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c4/subdir/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c4/unknown':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c4/unknown/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c4/404error.html':
return Response(status='200 Ok', body='''
<html>
<body style="background: #000000; color: #ffaaaa">
<p>Chrome's 404 fancy-page sucks.</p>
</body>
</html>
'''.strip())(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c5':
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c5/index.html':
return Response(status='503 Service Unavailable')(env,
start_response)
elif env['PATH_INFO'] == '/v1/a/c5/503error.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c5/unknown':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c5/unknown/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c5/404error.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c6':
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c6/subdir':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] in ('/v1/a/c7', '/v1/a/c7/'):
return self.listing(env, start_response)
elif env['PATH_INFO'] in ('/v1/a/c8', '/v1/a/c8/'):
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c8/subdir/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] in ('/v1/a/c9', '/v1/a/c9/'):
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c9/subdir/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] in ('/v1/a/c10', '/v1/a/c10/'):
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c10/\xe2\x98\x83/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c10/\xe2\x98\x83/\xe2\x98\x83/':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] in ('/v1/a/c11', '/v1/a/c11/'):
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11/subdir/':
return Response(status='200 Ok', headers={
'Content-Type': 'application/directory'})(
env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11/subdir/index.html':
return Response(status='200 Ok', body='''
<html>
<body>
<h2>c11 subdir index</h2>
</body>
</html>
'''.strip())(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11/subdir2/':
return Response(status='200 Ok', headers={'Content-Type':
'application/directory'})(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11/subdir2/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] in ('/v1/a/c11a', '/v1/a/c11a/'):
return self.listing(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11a/subdir/':
return Response(status='200 Ok', headers={'Content-Type':
'text/directory'})(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11a/subdir/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11a/subdir2/':
return Response(status='200 Ok', headers={'Content-Type':
'application/directory'})(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11a/subdir2/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11a/subdir3/':
return Response(status='200 Ok', headers={'Content-Type':
'not_a/directory'})(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c11a/subdir3/index.html':
return Response(status='404 Not Found')(env, start_response)
elif env['PATH_INFO'] == '/v1/a/c12/index.html':
return Response(status='200 Ok', body='index file')(env,
start_response)
elif env['PATH_INFO'] == '/v1/a/c12/200error.html':
return Response(status='200 Ok', body='error file')(env,
start_response)
else:
raise Exception('Unknown path %r' % env['PATH_INFO'])
def listing(self, env, start_response):
headers = {'x-container-read': '.r:*'}
if ((env['PATH_INFO'] in (
'/v1/a/c3', '/v1/a/c4', '/v1/a/c8', '/v1/a/c9'))
and (env['QUERY_STRING'] ==
'delimiter=/&format=json&prefix=subdir/')):
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
'Content-Type': 'application/json; charset=utf-8'})
body = '''
[{"name":"subdir/1.txt",
"hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.709100"},
{"name":"subdir/2.txt",
"hash":"c85c1dcd19cf5cbac84e6043c31bb63e", "bytes":20,
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.734140"},
{"subdir":"subdir3/subsubdir/"}]
'''.strip()
elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
'delimiter=/&format=json&prefix=subdiry/':
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
'Content-Type': 'application/json; charset=utf-8'})
body = '[]'
elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
'limit=1&format=json&delimiter=/&limit=1&prefix=subdirz/':
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
'Content-Type': 'application/json; charset=utf-8'})
body = '''
[{"name":"subdirz/1.txt",
"hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.709100"}]
'''.strip()
elif env['PATH_INFO'] == '/v1/a/c6' and env['QUERY_STRING'] == \
'limit=1&format=json&delimiter=/&limit=1&prefix=subdir/':
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
'X-Container-Web-Listings': 't',
'Content-Type': 'application/json; charset=utf-8'})
body = '''
[{"name":"subdir/1.txt",
"hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.709100"}]
'''.strip()
elif env['PATH_INFO'] == '/v1/a/c10' and (
env['QUERY_STRING'] ==
'delimiter=/&format=json&prefix=%E2%98%83/' or
env['QUERY_STRING'] ==
'delimiter=/&format=json&prefix=%E2%98%83/%E2%98%83/'):
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
'X-Container-Web-Listings': 't',
'Content-Type': 'application/json; charset=utf-8'})
body = '''
[{"name":"\u2603/\u2603/one.txt",
"hash":"73f1dd69bacbf0847cc9cffa3c6b23a1", "bytes":22,
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.709100"},
{"subdir":"\u2603/\u2603/"}]
'''.strip()
elif 'prefix=' in env['QUERY_STRING']:
return Response(status='204 No Content')(env, start_response)
elif 'format=json' in env['QUERY_STRING']:
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'Content-Type': 'application/json; charset=utf-8'})
body = '''
[{"name":"401error.html",
"hash":"893f8d80692a4d3875b45be8f152ad18", "bytes":110,
"content_type":"text/html",
"last_modified":"2011-03-24T04:27:52.713710"},
{"name":"404error.html",
"hash":"62dcec9c34ed2b347d94e6ca707aff8c", "bytes":130,
"content_type":"text/html",
"last_modified":"2011-03-24T04:27:52.720850"},
{"name":"index.html",
"hash":"8b469f2ca117668a5131fe9ee0815421", "bytes":347,
"content_type":"text/html",
"last_modified":"2011-03-24T04:27:52.683590"},
{"name":"listing.css",
"hash":"7eab5d169f3fcd06a08c130fa10c5236", "bytes":17,
"content_type":"text/css",
"last_modified":"2011-03-24T04:27:52.721610"},
{"name":"one.txt", "hash":"73f1dd69bacbf0847cc9cffa3c6b23a1",
"bytes":22, "content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.722270"},
{"name":"subdir/1.txt",
"hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.709100"},
{"name":"subdir/2.txt",
"hash":"c85c1dcd19cf5cbac84e6043c31bb63e", "bytes":20,
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.734140"},
{"name":"subdir/\u2603.txt",
"hash":"7337d028c093130898d937c319cc9865", "bytes":72981,
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.735460"},
{"name":"subdir2", "hash":"d41d8cd98f00b204e9800998ecf8427e",
"bytes":0, "content_type":"text/directory",
"last_modified":"2011-03-24T04:27:52.676690"},
{"name":"subdir3/subsubdir/index.html",
"hash":"04eea67110f883b1a5c97eb44ccad08c", "bytes":72,
"content_type":"text/html",
"last_modified":"2011-03-24T04:27:52.751260"},
{"name":"two.txt", "hash":"10abb84c63a5cff379fdfd6385918833",
"bytes":22, "content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.825110"},
{"name":"\u2603/\u2603/one.txt",
"hash":"73f1dd69bacbf0847cc9cffa3c6b23a1", "bytes":22,
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.935560"}]
'''.strip()
else:
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'Content-Type': 'text/plain; charset=utf-8'})
body = '\n'.join(['401error.html', '404error.html', 'index.html',
'listing.css', 'one.txt', 'subdir/1.txt',
'subdir/2.txt', u'subdir/\u2603.txt', 'subdir2',
'subdir3/subsubdir/index.html', 'two.txt',
u'\u2603/\u2603/one.txt'])
return Response(status='200 Ok', headers=headers,
body=body)(env, start_response)
class TestStaticWeb(unittest.TestCase):
def setUp(self):
self.app = FakeApp()
self.test_staticweb = staticweb.filter_factory({})(self.app)
self._orig_get_container_info = staticweb.get_container_info
staticweb.get_container_info = mock_get_container_info
def tearDown(self):
staticweb.get_container_info = self._orig_get_container_info
def test_app_set(self):
app = FakeApp()
sw = staticweb.filter_factory({})(app)
self.assertEquals(sw.app, app)
def test_conf_set(self):
conf = {'blah': 1}
sw = staticweb.filter_factory(conf)(FakeApp())
self.assertEquals(sw.conf, conf)
def test_root(self):
resp = Request.blank('/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
def test_version(self):
resp = Request.blank('/v1').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 412)
def test_account(self):
resp = Request.blank('/v1/a').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 401)
def test_container1(self):
resp = Request.blank('/v1/a/c1').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 401)
def test_container1_web_mode_explicitly_off(self):
resp = Request.blank('/v1/a/c1',
headers={'x-web-mode': 'false'}).get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 401)
def test_container1_web_mode_explicitly_on(self):
resp = Request.blank('/v1/a/c1',
headers={'x-web-mode': 'true'}).get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 404)
def test_container2(self):
resp = Request.blank('/v1/a/c2').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(len(resp.body.split('\n')),
int(resp.headers['x-container-object-count']))
def test_container2_web_mode_explicitly_off(self):
resp = Request.blank(
'/v1/a/c2',
headers={'x-web-mode': 'false'}).get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(len(resp.body.split('\n')),
int(resp.headers['x-container-object-count']))
def test_container2_web_mode_explicitly_on(self):
resp = Request.blank(
'/v1/a/c2',
headers={'x-web-mode': 'true'}).get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
def test_container2onetxt(self):
resp = Request.blank(
'/v1/a/c2/one.txt').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
def test_container2json(self):
resp = Request.blank(
'/v1/a/c2?format=json').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(len(json.loads(resp.body)),
int(resp.headers['x-container-object-count']))
def test_container2json_web_mode_explicitly_off(self):
resp = Request.blank(
'/v1/a/c2?format=json',
headers={'x-web-mode': 'false'}).get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(len(json.loads(resp.body)),
int(resp.headers['x-container-object-count']))
def test_container2json_web_mode_explicitly_on(self):
resp = Request.blank(
'/v1/a/c2?format=json',
headers={'x-web-mode': 'true'}).get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
def test_container3(self):
resp = Request.blank('/v1/a/c3').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 301)
self.assertEquals(resp.headers['location'],
'http://localhost/v1/a/c3/')
def test_container3indexhtml(self):
resp = Request.blank('/v1/a/c3/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Test main index.html file.' in resp.body)
def test_container3subsubdir(self):
resp = Request.blank(
'/v1/a/c3/subdir3/subsubdir').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 301)
def test_container3subsubdircontents(self):
resp = Request.blank(
'/v1/a/c3/subdir3/subsubdir/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.body, 'index file')
def test_container3subdir(self):
resp = Request.blank(
'/v1/a/c3/subdir/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c3/subdir/' in resp.body)
self.assert_('</style>' in resp.body)
self.assert_('<link' not in resp.body)
self.assert_('listing.css' not in resp.body)
def test_container3subdirx(self):
resp = Request.blank(
'/v1/a/c3/subdirx/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
def test_container3subdiry(self):
resp = Request.blank(
'/v1/a/c3/subdiry/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
def test_container3subdirz(self):
resp = Request.blank(
'/v1/a/c3/subdirz').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 301)
def test_container3unknown(self):
resp = Request.blank(
'/v1/a/c3/unknown').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
self.assert_("Chrome's 404 fancy-page sucks." not in resp.body)
def test_container3bindexhtml(self):
resp = Request.blank('/v1/a/c3b/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.body, '')
def test_container4indexhtml(self):
resp = Request.blank('/v1/a/c4/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c4/' in resp.body)
self.assert_('href="listing.css"' in resp.body)
def test_container4indexhtmlauthed(self):
resp = Request.blank('/v1/a/c4').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 301)
resp = Request.blank(
'/v1/a/c4',
environ={'REMOTE_USER': 'authed'}).get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 200)
resp = Request.blank(
'/v1/a/c4', headers={'x-web-mode': 't'},
environ={'REMOTE_USER': 'authed'}).get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 301)
def test_container4unknown(self):
resp = Request.blank(
'/v1/a/c4/unknown').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
self.assert_("Chrome's 404 fancy-page sucks." in resp.body)
def test_container4subdir(self):
resp = Request.blank(
'/v1/a/c4/subdir/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c4/subdir/' in resp.body)
self.assert_('</style>' not in resp.body)
self.assert_('<link' in resp.body)
self.assert_('href="../listing.css"' in resp.body)
self.assertEquals(resp.headers['content-type'],
'text/html; charset=UTF-8')
def test_container4onetxt(self):
resp = Request.blank(
'/v1/a/c4/one.txt').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
def test_container4twotxt(self):
resp = Request.blank(
'/v1/a/c4/two.txt').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 503)
def test_container5indexhtml(self):
resp = Request.blank('/v1/a/c5/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 503)
def test_container5unknown(self):
resp = Request.blank(
'/v1/a/c5/unknown').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
self.assert_("Chrome's 404 fancy-page sucks." not in resp.body)
def test_container6subdir(self):
resp = Request.blank(
'/v1/a/c6/subdir').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 301)
def test_container7listing(self):
resp = Request.blank('/v1/a/c7/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 404)
self.assert_('Web Listing Disabled' in resp.body)
def test_container8listingcss(self):
resp = Request.blank(
'/v1/a/c8/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c8/' in resp.body)
self.assert_('<link' in resp.body)
self.assert_(
'href="http://localhost/stylesheets/listing.css"' in resp.body)
def test_container8subdirlistingcss(self):
resp = Request.blank(
'/v1/a/c8/subdir/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c8/subdir/' in resp.body)
self.assert_('<link' in resp.body)
self.assert_(
'href="http://localhost/stylesheets/listing.css"' in resp.body)
def test_container9listingcss(self):
resp = Request.blank(
'/v1/a/c9/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c9/' in resp.body)
self.assert_('<link' in resp.body)
self.assert_('href="/absolute/listing.css"' in resp.body)
def test_container9subdirlistingcss(self):
resp = Request.blank(
'/v1/a/c9/subdir/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c9/subdir/' in resp.body)
self.assert_('<link' in resp.body)
self.assert_('href="/absolute/listing.css"' in resp.body)
def test_container10unicodesubdirlisting(self):
resp = Request.blank(
'/v1/a/c10/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c10/' in resp.body)
resp = Request.blank(
'/v1/a/c10/\xe2\x98\x83/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c10/\xe2\x98\x83/' in resp.body)
resp = Request.blank(
'/v1/a/c10/\xe2\x98\x83/\xe2\x98\x83/'
).get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_(
'Listing of /v1/a/c10/\xe2\x98\x83/\xe2\x98\x83/' in resp.body)
def test_container11subdirmarkerobjectindex(self):
resp = Request.blank('/v1/a/c11/subdir/').get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('<h2>c11 subdir index</h2>' in resp.body)
def test_container11subdirmarkermatchdirtype(self):
resp = Request.blank('/v1/a/c11a/subdir/').get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 404)
self.assert_('Index File Not Found' in resp.body)
def test_container11subdirmarkeraltdirtype(self):
resp = Request.blank('/v1/a/c11a/subdir2/').get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 200)
def test_container11subdirmarkerinvaliddirtype(self):
resp = Request.blank('/v1/a/c11a/subdir3/').get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 200)
def test_container12unredirectedrequest(self):
resp = Request.blank('/v1/a/c12/').get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('index file' in resp.body)
def test_container_404_has_css(self):
resp = Request.blank('/v1/a/c13/').get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 404)
self.assert_('listing.css' in resp.body)
def test_container_404_has_no_css(self):
resp = Request.blank('/v1/a/c7/').get_response(
self.test_staticweb)
self.assertEquals(resp.status_int, 404)
self.assert_('listing.css' not in resp.body)
self.assert_('<style' in resp.body)
def test_container_unicode_stdlib_json(self):
with mock.patch('swift.common.middleware.staticweb.json',
new=stdlib_json):
resp = Request.blank(
'/v1/a/c10/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c10/' in resp.body)
resp = Request.blank(
'/v1/a/c10/\xe2\x98\x83/').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_('Listing of /v1/a/c10/\xe2\x98\x83/' in resp.body)
resp = Request.blank(
'/v1/a/c10/\xe2\x98\x83/\xe2\x98\x83/'
).get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assert_(
'Listing of /v1/a/c10/\xe2\x98\x83/\xe2\x98\x83/' in resp.body)
def test_subrequest_once_if_possible(self):
resp = Request.blank(
'/v1/a/c4/one.txt').get_response(self.test_staticweb)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.headers['x-object-meta-test'], 'value')
self.assertEquals(resp.body, '1')
self.assertEquals(self.app.calls, 1)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
nisse3000/pymatgen
|
pymatgen/analysis/magnetism/tests/test_analyzer.py
|
2
|
11626
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
from pymatgen.core import Specie, Element, Lattice, Structure
from pymatgen.io.cif import CifParser
from pymatgen.analysis.magnetism import *
import os
import unittest
import warnings
import numpy as np
from monty.serialization import loadfn
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
'test_files')
class CollinearMagneticStructureAnalyzerTest(unittest.TestCase):
def setUp(self):
parser = CifParser(os.path.join(test_dir, 'Fe.cif'))
self.Fe = parser.get_structures()[0]
parser = CifParser(os.path.join(test_dir, 'LiFePO4.cif'))
self.LiFePO4 = parser.get_structures()[0]
parser = CifParser(os.path.join(test_dir, 'Fe3O4.cif'))
self.Fe3O4 = parser.get_structures()[0]
parser = CifParser(os.path.join(test_dir, 'magnetic.ncl.example.GdB4.mcif'))
self.GdB4 = parser.get_structures()[0]
parser = CifParser(os.path.join(test_dir, 'magnetic.example.NiO.mcif'))
self.NiO_expt = parser.get_structures()[0]
latt = Lattice.cubic(4.17)
species = ["Ni", "O"]
coords = [[0, 0, 0],
[0.5, 0.5, 0.5]]
self.NiO = Structure.from_spacegroup(225, latt, species, coords)
latt = Lattice([[2.085, 2.085, 0.0],
[0.0, -2.085, -2.085],
[-2.085, 2.085, -4.17]])
species = ["Ni", "Ni", "O", "O"]
coords = [[0.5, 0, 0.5],
[0, 0, 0],
[0.25, 0.5, 0.25],
[0.75, 0.5, 0.75]]
self.NiO_AFM_111 = Structure(latt, species, coords,
site_properties={'magmom': [-5, 5, 0, 0]})
latt = Lattice([[2.085, 2.085, 0],
[0, 0, -4.17],
[-2.085, 2.085, 0]])
species = ["Ni", "Ni", "O", "O"]
coords = [[0.5, 0.5, 0.5],
[0, 0, 0],
[0, 0.5, 0],
[0.5, 0, 0.5]]
self.NiO_AFM_001 = Structure(latt, species, coords,
site_properties={'magmom': [-5, 5, 0, 0]})
latt = Lattice([[2.085, 2.085, 0],
[0, 0, -4.17],
[-2.085, 2.085, 0]])
species = ["Ni", "Ni", "O", "O"]
coords = [[0.5, 0.5, 0.5],
[0, 0, 0],
[0, 0.5, 0],
[0.5, 0, 0.5]]
self.NiO_AFM_001_opposite = Structure(latt, species, coords,
site_properties={'magmom': [5, -5, 0, 0]})
latt = Lattice([[2.085, 2.085, 0],
[0, 0, -4.17],
[-2.085, 2.085, 0]])
species = ["Ni", "Ni", "O", "O"]
coords = [[0.5, 0.5, 0.5],
[0, 0, 0],
[0, 0.5, 0],
[0.5, 0, 0.5]]
self.NiO_unphysical = Structure(latt, species, coords,
site_properties={'magmom': [-3, 0, 0, 0]})
warnings.simplefilter("ignore")
def tearDown(self):
warnings.resetwarnings()
def test_get_representations(self):
# tests to convert between storing magnetic moment information
# on site_properties or on Specie 'spin' property
# test we store magnetic moments on site properties
self.Fe.add_site_property('magmom', [5])
msa = CollinearMagneticStructureAnalyzer(self.Fe)
self.assertEqual(msa.structure.site_properties['magmom'][0], 5)
# and that we can retrieve a spin representation
Fe_spin = msa.get_structure_with_spin()
self.assertFalse('magmom' in Fe_spin.site_properties)
self.assertEqual(Fe_spin[0].specie.spin, 5)
# test we can remove magnetic moment information
Fe_none = msa.get_nonmagnetic_structure()
self.assertFalse('magmom' in Fe_none.site_properties)
# test with disorder on magnetic site
self.Fe[0] = {Specie('Fe', oxidation_state=0, properties={'spin': 5}): 0.5, 'Ni': 0.5}
self.assertRaises(NotImplementedError, CollinearMagneticStructureAnalyzer, self.Fe)
def test_matches(self):
self.assertTrue(self.NiO.matches(self.NiO_AFM_111))
self.assertTrue(self.NiO.matches(self.NiO_AFM_001))
# MSA adds magmoms to Structure, so not equal
msa = CollinearMagneticStructureAnalyzer(self.NiO,
overwrite_magmom_mode="replace_all")
self.assertFalse(msa.matches_ordering(self.NiO))
self.assertFalse(msa.matches_ordering(self.NiO_AFM_111))
self.assertFalse(msa.matches_ordering(self.NiO_AFM_001))
msa = CollinearMagneticStructureAnalyzer(self.NiO_AFM_001,
overwrite_magmom_mode="respect_sign")
self.assertFalse(msa.matches_ordering(self.NiO))
self.assertFalse(msa.matches_ordering(self.NiO_AFM_111))
self.assertTrue(msa.matches_ordering(self.NiO_AFM_001))
self.assertTrue(msa.matches_ordering(self.NiO_AFM_001_opposite))
msa = CollinearMagneticStructureAnalyzer(self.NiO_AFM_111,
overwrite_magmom_mode="respect_sign")
self.assertFalse(msa.matches_ordering(self.NiO))
self.assertTrue(msa.matches_ordering(self.NiO_AFM_111))
self.assertFalse(msa.matches_ordering(self.NiO_AFM_001))
self.assertFalse(msa.matches_ordering(self.NiO_AFM_001_opposite))
def test_modes(self):
mode = "none"
msa = CollinearMagneticStructureAnalyzer(self.NiO,
overwrite_magmom_mode=mode)
magmoms = msa.structure.site_properties['magmom']
self.assertEqual(magmoms, [0, 0])
mode = "respect_sign"
msa = CollinearMagneticStructureAnalyzer(self.NiO_unphysical,
overwrite_magmom_mode=mode)
magmoms = msa.structure.site_properties['magmom']
self.assertEqual(magmoms, [-5, 0, 0, 0])
mode = "respect_zeros"
msa = CollinearMagneticStructureAnalyzer(self.NiO_unphysical,
overwrite_magmom_mode=mode)
magmoms = msa.structure.site_properties['magmom']
self.assertEqual(magmoms, [5, 0, 0, 0])
mode = "replace_all"
msa = CollinearMagneticStructureAnalyzer(self.NiO_unphysical,
overwrite_magmom_mode=mode,
make_primitive=False)
magmoms = msa.structure.site_properties['magmom']
self.assertEqual(magmoms, [5, 5, 0, 0])
mode = "replace_all_if_undefined"
msa = CollinearMagneticStructureAnalyzer(self.NiO,
overwrite_magmom_mode=mode)
magmoms = msa.structure.site_properties['magmom']
self.assertEqual(magmoms, [5, 0])
mode = "normalize"
msa = CollinearMagneticStructureAnalyzer(msa.structure,
overwrite_magmom_mode='normalize')
magmoms = msa.structure.site_properties['magmom']
self.assertEqual(magmoms, [1, 0])
def test_get_ferromagnetic_structure(self):
msa = CollinearMagneticStructureAnalyzer(self.NiO,
overwrite_magmom_mode="replace_all_if_undefined")
s1 = msa.get_ferromagnetic_structure()
s1_magmoms = [float(m) for m in s1.site_properties['magmom']]
s1_magmoms_ref = [5.0, 0.0]
self.assertListEqual(s1_magmoms, s1_magmoms_ref)
msa2 = CollinearMagneticStructureAnalyzer(self.NiO_AFM_111,
overwrite_magmom_mode="replace_all_if_undefined")
s2 = msa.get_ferromagnetic_structure(make_primitive=False)
s2_magmoms = [float(m) for m in s2.site_properties['magmom']]
s2_magmoms_ref = [5.0, 0.0]
self.assertListEqual(s2_magmoms, s2_magmoms_ref)
s2_prim = msa.get_ferromagnetic_structure(make_primitive=True)
self.assertTrue(CollinearMagneticStructureAnalyzer(s1).matches_ordering(s2_prim))
def test_magnetic_properties(self):
msa = CollinearMagneticStructureAnalyzer(self.GdB4)
self.assertFalse(msa.is_collinear)
msa = CollinearMagneticStructureAnalyzer(self.Fe)
self.assertFalse(msa.is_magnetic)
self.Fe.add_site_property('magmom', [5])
msa = CollinearMagneticStructureAnalyzer(self.Fe)
self.assertTrue(msa.is_magnetic)
self.assertTrue(msa.is_collinear)
self.assertEqual(msa.ordering, Ordering.FM)
msa = CollinearMagneticStructureAnalyzer(self.NiO, make_primitive=False,
overwrite_magmom_mode="replace_all_if_undefined")
self.assertEqual(msa.number_of_magnetic_sites, 4)
self.assertEqual(msa.number_of_unique_magnetic_sites(), 1)
self.assertEqual(msa.types_of_magnetic_specie, [Element('Ni')])
self.assertEqual(msa.get_exchange_group_info(), ('Fm-3m', 225))
def test_str(self):
msa = CollinearMagneticStructureAnalyzer(self.NiO_AFM_001)
ref_msa_str = """Structure Summary
Lattice
abc : 2.948635277547903 4.17 2.948635277547903
angles : 90.0 90.0 90.0
volume : 36.2558565
A : 2.085 2.085 0.0
B : 0.0 0.0 -4.17
C : -2.085 2.085 0.0
Magmoms Sites
+5.00 PeriodicSite: Ni (0.0000, 0.0000, 0.0000) [0.0000, 0.0000, 0.0000]
PeriodicSite: O (0.0000, 0.0000, -2.0850) [0.0000, 0.5000, 0.0000]
PeriodicSite: O (0.0000, 2.0850, 0.0000) [0.5000, 0.0000, 0.5000]
-5.00 PeriodicSite: Ni (0.0000, 2.0850, -2.0850) [0.5000, 0.5000, 0.5000]"""
# just compare lines from 'Magmoms Sites',
# since lattice param string can vary based on machine precision
self.assertEqual("\n".join(str(msa).split("\n")[-5:-1]),
"\n".join(ref_msa_str.split("\n")[-5:-1]))
def test_round_magmoms(self):
struct = self.NiO_AFM_001.copy()
struct.add_site_property('magmom', [-5.0143, -5.02, 0.147, 0.146])
msa = CollinearMagneticStructureAnalyzer(struct, round_magmoms=0.001, make_primitive=False)
self.assertTrue(np.allclose(msa.magmoms, [-5.0171, -5.0171, 0.1465, 0.1465]))
self.assertAlmostEqual(msa.magnetic_species_and_magmoms['Ni'], 5.0171)
self.assertAlmostEqual(msa.magnetic_species_and_magmoms['O'], 0.1465)
struct.add_site_property('magmom', [-5.0143, 4.5, 0.147, 0.146])
msa = CollinearMagneticStructureAnalyzer(struct, round_magmoms=0.001, make_primitive=False)
self.assertTrue(np.allclose(msa.magmoms, [-5.0143, 4.5, 0.1465, 0.1465]))
self.assertAlmostEqual(msa.magnetic_species_and_magmoms['Ni'][0], 4.5)
self.assertAlmostEqual(msa.magnetic_species_and_magmoms['Ni'][1], 5.0143)
self.assertAlmostEqual(msa.magnetic_species_and_magmoms['O'], 0.1465)
class MagneticDeformationTest(unittest.TestCase):
def test_magnetic_deformation(self):
test_structs = loadfn(os.path.join(test_dir, 'magnetic_deformation.json'))
mag_def = magnetic_deformation(test_structs[0], test_structs[1])
self.assertEqual(mag_def.type, "NM-FM")
self.assertAlmostEqual(mag_def.deformation, 5.0130859485170971)
if __name__ == '__main__':
unittest.main()
|
mit
|
wdxtub/Patriots
|
static/code/sentiment_lstm.py
|
1
|
10671
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import #enable Python 3.x-style absolute imports
from __future__ import print_function
import yaml
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import pandas as pd #pandas for data handling
import numpy as np #NumPy
import jieba #jieba Chinese word segmentation
import h5py, pickle, os, datetime
from keras.models import model_from_json, save_model
from keras.preprocessing import sequence
from keras.optimizers import SGD, RMSprop, Adagrad
from keras.utils import np_utils
from keras.models import Sequential, model_from_yaml
from keras.layers.core import Dense, Dropout, Activation
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM, GRU
sys.setrecursionlimit(1000000)
# save model http://www.linuxdiyf.com/linux/22940.html
# http://www.linuxdiyf.com/linux/22937.html
#
# http://spaces.ac.cn/archives/3414/
# https://github.com/BUPTLdy/Sentiment-Analysis
# http://blog.sina.com.cn/s/blog_735f29100102wjwu.html
# http://blog.csdn.net/weixin_36541072/article/details/53786020
# https://keras-cn.readthedocs.io/en/latest/getting_started/concepts/
# https://keras-cn.readthedocs.io/en/latest/getting_started/keras_linux/
# Parameter settings
maxlen = 50
lstm_batch_size = 16
lstm_epochs = 15
datadir = ''
modeldir = '../model/lstm_didi'
testdir = ''
# Load the training corpus files
def loadfile():
print("读取语料数据")
neg=pd.read_excel(datadir + '/neg.xls',header=None,index=None)
mid=pd.read_excel(datadir + '/pos.xls',header=None,index=None)
print("读取训练语料完毕")
print("给训练语料贴上标签")
mid['mark']=1
neg['mark']=0
print("合并语料")
pn=pd.concat([mid,neg],ignore_index=True)
neglen=len(neg)
midlen=len(mid) #count the corpus entries
print('neg count:' + str(neglen))
print('pos count:' + str(midlen))
return pn
def tokenizer(text):
cw = lambda x: list(jieba.cut(x)) #word-segmentation helper
text['words'] = text[0].apply(cw)
return text
def generatedict(text):
# Build the word dictionary and save it
d2v_train = pd.concat([text['words']], ignore_index = True)
w = [] #collect all words into one list
for i in d2v_train:
w.extend(i)
dict = pd.DataFrame(pd.Series(w).value_counts()) #count word occurrences
del w,d2v_train
dict['id'] = list(range(1,len(dict)+1))
# this dict needs to be persisted for later use
outputFile = modeldir + '/dict.data'
fw = open(outputFile, 'w')
pickle.dump(dict,fw)
fw.close()
return dict
def word2index(text, dict):
get_sent = lambda x: list(dict['id'][x])
text['sent'] = text['words'].apply(get_sent)
print("Pad sequences (samples x time)")
text['sent'] = list(sequence.pad_sequences(text['sent'], maxlen=maxlen))
return text
def getdata(text):
x = np.array(list(text['sent']))[::2] #training set (even-indexed rows)
y = np.array(list(text['mark']))[::2]
xt = np.array(list(text['sent']))[1::2] #test set (odd-indexed rows)
yt = np.array(list(text['mark']))[1::2]
xa = np.array(list(text['sent'])) #full set
ya = np.array(list(text['mark']))
return x,y,xt,yt,xa,ya
def train_lstm(dict,x,y,xt,yt):
model = Sequential()
model.add(Embedding(len(dict)+1, 256, input_length=maxlen))
model.add(LSTM(output_dim=128, activation='sigmoid', inner_activation='hard_sigmoid'))
model.add(Dropout(0.5))
model.add(Dense(1))
# model.add(Dense(input_dim = 32, output_dim = 1))
model.add(Activation('sigmoid'))
print ('模型构建完成')
#model.compile(loss='binary_crossentropy', optimizer='adam', class_mode="binary")
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
print ("模型编译完成")
model.fit(x, y, batch_size=lstm_batch_size, epochs=lstm_epochs, verbose=0)
print ("模型训练完成")
print ("保存模型")
yaml_string = model.to_yaml()
with open(modeldir + '/lstm.yml', 'w') as outfile:
outfile.write( yaml.dump(yaml_string, default_flow_style=True) )
model.save_weights(modeldir + '/lstm.h5')
print ("测试集评估")
score = model.evaluate(xt, yt, verbose=0)
print ("准确率:",score[1])
return model
def saveresult(model, xt, text):
classes = model.predict_classes(xt, verbose=1)
proba = model.predict_proba(xt, verbose=1)
print ("\n输出结果")
filename = 'result.txt'
f = open('result.txt', 'w')
i = 1
j = 0
for c in classes:
f.write(str(c))
f.write(",")
f.write(str(proba[j]))
f.write(",")
line = "".join(text['words'][i])
f.write(line.encode('utf-8'))
f.write("\n")
i = i + 2
j = j + 1
f.close()
print ("\n排序结果")
num = 1
result = []
with open(filename, 'r') as f:
while True:
line = f.readline()
if not line:
break
print("processing line #" + str(num))
num = num + 1
arr = line.split(',')
item = (int(arr[0][1:-1]), float(arr[1][2:-1]), "".join(arr[2:]))
result.append(item)
result.sort(key=lambda tup:tup[1])
print(len(result))
f = open('sorted.txt', 'w')
for item in result:
f.write(str(item[0]))
f.write(",")
f.write(str(item[1]))
f.write(",")
f.write(item[2])
print("done")
def loaddict():
fr = open(modeldir + '/dict.data')
dict = pickle.load(fr)
return dict
# Train the model and save it
def train():
print('Loading Data...')
pn = loadfile()
print('Tokenising...')
pn = tokenizer(pn)
print('Generating Dict...')
dict = generatedict(pn)
print('Word to Index...')
pn = word2index(pn, dict)
print('Preparing data...')
x,y,xt,yt,xa,ya = getdata(pn)
print('Model Stage...')
# train on the full data set here
model = train_lstm(dict, xa, ya, xt, yt)
#print('Save Test Result...')
#saveresult(model, xt, pn)
print("Done")
def batchtest(filepath):
dict = loaddict()
with open(modeldir + '/lstm.yml', 'r') as f:
yaml_string = yaml.load(f)
model = model_from_yaml(yaml_string)
model.load_weights(modeldir + '/lstm.h5')
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
# Read the test file
# loop over it line by line, keeping the counters outside the loop
test_count = 0
correct_count = 0
if os.path.exists(filepath):
f = open(filepath, 'r')
try:
lines = f.readlines()
for line in lines:
if len(line) <= 0:
continue
else:
arr = line.split(',')
label = arr[0]
test_count += 1
text = ",".join(arr[1:])
textarr = list(jieba.cut(text))
textvec = []
add = 1
for item in textarr:
# words not in the dictionary are dropped (they occur too rarely to matter)
if item in dict['id']:
textvec.append(dict['id'][item])
textvec = pd.Series(textvec)
textvec = sequence.pad_sequences([textvec], maxlen=maxlen)
# predicted probability
proba = model.predict_proba(textvec, verbose=0)
# check whether the prediction matches the label
for s in proba:
if s[0] > 0.5 and label == '1' or s[0] <= 0.5 and label == '0':
correct_count += 1
print('[' + str(test_count) + ']: ' + label + ' ' + str(s[0]) + ' ' + text[:-1])
else:
print('[' + str(test_count) + ']:[x] ' + label + ' ' + str(s[0]) + ' ' + text[:-1])
finally:
f.close() # make sure the file is closed
return correct_count, test_count
# Batch prediction to reduce memory use; takes a list of strings
def predict_arr(arr):
dict = loaddict()
probas = []
with open(modeldir + '/lstm.yml', 'r') as f:
yaml_string = yaml.load(f)
model = model_from_yaml(yaml_string)
model.load_weights(modeldir + '/lstm.h5')
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
for s in arr:
textarr = list(jieba.cut(s))
textvec = []
add = 1
for item in textarr:
# words not in the dictionary are dropped (they occur too rarely to matter)
if item in dict['id']:
textvec.append(dict['id'][item])
textvec = pd.Series(textvec)
textvec = sequence.pad_sequences([textvec], maxlen=maxlen)
proba = model.predict_proba(textvec, verbose=0)
probas.append(proba[0][0])
return probas
def predict(text):
print('Loading Dict Data..')
dict = loaddict()
# map each word to its dictionary index (same format as during training)
textarr = list(jieba.cut(text))
textvec = []
add = 1
for item in textarr:
# words not in the dictionary are dropped (they occur too rarely to matter)
if item in dict['id']:
textvec.append(dict['id'][item])
textvec = pd.Series(textvec)
textvec = sequence.pad_sequences([textvec], maxlen=maxlen)
# ----
print('loading model......')
with open(modeldir + '/lstm.yml', 'r') as f:
yaml_string = yaml.load(f)
model = model_from_yaml(yaml_string)
print('loading weights......')
model.load_weights(modeldir + '/lstm.h5')
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
# the model is now fully loaded and ready for prediction
#classes = model.predict_classes(textvec, verbose=1)
proba = model.predict_proba(textvec, verbose=0)
# output format temporarily changed for the knowledge-graph integration
#for s in proba:
# if s[0] > 0.5:
# print('positive ' + str(s[0]) + ' ' + text)
# else:
# print('negative ' + str(s[0]) + ' ' + text)
return proba[0][0]
if __name__=='__main__':
argvs_length = len(sys.argv)
if argvs_length >= 4:
argvs = sys.argv
action = argvs[1]
if action == 'train': # training
datadir = argvs[2]
modeldir = argvs[3]
begin = datetime.datetime.now()
train()
end = datetime.datetime.now()
# record the training time (and model size) into result.txt
with open(modeldir + '/result.txt', "w") as f:
f.write('训练时长: ' + str(end-begin))
elif action == 'predict':
modeldir = argvs[2]
sentence = " ".join(argvs[3:])
predict(sentence)
elif action == 'test':
datadir = argvs[2]
modeldir = argvs[3]
testdir = argvs[4]
begin = datetime.datetime.now()
result = batchtest(datadir+'/test.txt')
end = datetime.datetime.now()
# record the test duration and accuracy into result.txt
with open(testdir + '/result.txt', "w") as f:
f.write('测试时长: ' + str(end-begin) + '\n')
f.write('正确率: ' + str(float(result[0])/float(result[1])) + ' (' + str(result[0]) + '/' + str(result[1]) + ')\n')
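# Illustrative invocations matching the argument parsing above (all paths and
# the sample sentence are placeholders):
#   python sentiment_lstm.py train <datadir> <modeldir>
#   python sentiment_lstm.py predict <modeldir> "sentence to score"
#   python sentiment_lstm.py test <datadir> <modeldir> <testdir>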
|
gpl-3.0
|
xxshutong/openerp-7.0
|
openerp/addons/purchase/res_config.py
|
19
|
5621
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp import pooler
from openerp.tools.translate import _
class purchase_config_settings(osv.osv_memory):
_name = 'purchase.config.settings'
_inherit = 'res.config.settings'
_columns = {
'default_invoice_method': fields.selection(
[('manual', 'Based on purchase order lines'),
('picking', 'Based on receptions'),
('order', 'Pre-generate draft invoices based on purchase orders'),
], 'Default invoicing control method', required=True, default_model='purchase.order'),
'group_purchase_pricelist':fields.boolean("Manage pricelist per supplier",
implied_group='product.group_purchase_pricelist',
help="""Allows to manage different prices based on rules per category of Supplier.
Example: 10% for retailers, promotion of 5 EUR on this product, etc."""),
'group_uom':fields.boolean("Manage different units of measure for products",
implied_group='product.group_uom',
help="""Allows you to select and maintain different units of measure for products."""),
'group_costing_method':fields.boolean("Compute product cost price based on average cost",
implied_group='product.group_costing_method',
help="""Allows you to compute product cost price based on average cost."""),
'module_warning': fields.boolean("Alerts by products or supplier",
help="""Allow to configure notification on products and trigger them when a user wants to purchase a given product or a given supplier.
Example: Product: this product is deprecated, do not purchase more than 5.
Supplier: don't forget to ask for an express delivery."""),
'module_purchase_double_validation': fields.boolean("Force two levels of approvals",
help="""Provide a double validation mechanism for purchases exceeding minimum amount.
This installs the module purchase_double_validation."""),
'module_purchase_requisition': fields.boolean("Manage purchase requisitions",
help="""Purchase Requisitions are used when you want to request quotations from several suppliers for a given set of products.
You can configure per product if you directly do a Request for Quotation
to one supplier or if you want a purchase requisition to negotiate with several suppliers."""),
'module_purchase_analytic_plans': fields.boolean('Use multiple analytic accounts on purchase orders',
help ="""Allows the user to maintain several analysis plans. These let you split lines on a purchase order between several accounts and analytic plans.
This installs the module purchase_analytic_plans."""),
'group_analytic_account_for_purchases': fields.boolean('Analytic accounting for purchases',
implied_group='purchase.group_analytic_accounting',
help="Allows you to specify an analytic account on purchase orders."),
}
_defaults = {
'default_invoice_method': 'manual',
}
def onchange_purchase_analytic_plans(self, cr, uid, ids, module_purchase_analytic_plans, context=None):
""" change group_analytic_account_for_purchases following module_purchase_analytic_plans """
if not module_purchase_analytic_plans:
return {}
return {'value': {'group_analytic_account_for_purchases': module_purchase_analytic_plans}}
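# For example, ticking "Use multiple analytic accounts on purchase orders"
# makes this onchange return {'value': {'group_analytic_account_for_purchases': True}},
# so the dependent checkbox is switched on in the same settings wizard.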
class account_config_settings(osv.osv_memory):
_inherit = 'account.config.settings'
_columns = {
'module_purchase_analytic_plans': fields.boolean('Use multiple analytic accounts on orders',
help ="""Allows the user to maintain several analysis plans. These let you split lines on a purchase order between several accounts and analytic plans.
This installs the module purchase_analytic_plans."""),
'group_analytic_account_for_purchases': fields.boolean('Analytic accounting for purchases',
implied_group='purchase.group_analytic_accounting',
help="Allows you to specify an analytic account on purchase orders."),
}
def onchange_purchase_analytic_plans(self, cr, uid, ids, module_purchase_analytic_plans, context=None):
""" change group_analytic_account_for_purchases following module_purchase_analytic_plans """
if not module_purchase_analytic_plans:
return {}
return {'value': {'group_analytic_account_for_purchases': module_purchase_analytic_plans}}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ofir123/CouchPotatoServer
|
libs/rtorrent/rpc/__init__.py
|
14
|
10808
|
# Copyright (c) 2013 Chris Lucas, <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import inspect
import rtorrent
import re
from rtorrent.common import bool_to_int, convert_version_tuple_to_str,\
safe_repr
from rtorrent.err import MethodError
from rtorrent.compat import xmlrpclib
def get_varname(rpc_call):
"""Transform rpc method into variable name.
@newfield example: Example
@example: if the name of the rpc method is 'p.get_down_rate', the variable
name will be 'down_rate'
"""
# extract variable name from xmlrpc func name
r = re.search(
"([ptdf]\.|system\.|get\_|is\_|set\_)+([^=]*)", rpc_call, re.I)
if r:
return(r.groups()[-1])
else:
return(None)
def _handle_unavailable_rpc_method(method, rt_obj):
msg = "Method isn't available."
if rt_obj.connection._get_client_version_tuple() < method.min_version:
msg = "This method is only available in " \
"RTorrent version v{0} or later".format(
convert_version_tuple_to_str(method.min_version))
raise MethodError(msg)
class DummyClass:
def __init__(self):
pass
class Method:
"""Represents an individual RPC method"""
def __init__(self, _class, method_name,
rpc_call, docstring=None, varname=None, **kwargs):
self._class = _class # : Class this method is associated with
self.class_name = _class.__name__
self.method_name = method_name # : name of public-facing method
self.rpc_call = rpc_call # : name of rpc method
self.docstring = docstring # : docstring for rpc method (optional)
self.varname = varname # : variable for the result of the method call, usually set to self.varname
self.min_version = kwargs.get("min_version", (
0, 0, 0)) # : Minimum version of rTorrent required
self.boolean = kwargs.get("boolean", False) # : returns boolean value?
self.post_process_func = kwargs.get(
"post_process_func", None) # : custom post process function
self.aliases = kwargs.get(
"aliases", []) # : aliases for method (optional)
self.required_args = []
#: Arguments required when calling the method (not utilized)
self.method_type = self._get_method_type()
if self.varname is None:
self.varname = get_varname(self.rpc_call)
assert self.varname is not None, "Couldn't get variable name."
def __repr__(self):
return safe_repr("Method(method_name='{0}', rpc_call='{1}')",
self.method_name, self.rpc_call)
def _get_method_type(self):
"""Determine whether method is a modifier or a retriever"""
if self.method_name[:4] == "set_": return('m') # modifier
else:
return('r') # retriever
def is_modifier(self):
if self.method_type == 'm':
return(True)
else:
return(False)
def is_retriever(self):
if self.method_type == 'r':
return(True)
else:
return(False)
def is_available(self, rt_obj):
if rt_obj.connection._get_client_version_tuple() < self.min_version or \
self.rpc_call not in rt_obj.connection._get_rpc_methods():
return(False)
else:
return(True)
class Multicall:
def __init__(self, class_obj, **kwargs):
self.class_obj = class_obj
if class_obj.__class__.__name__ == "RTorrent":
self.rt_obj = class_obj
else:
self.rt_obj = class_obj._rt_obj
self.calls = []
def add(self, method, *args):
"""Add call to multicall
@param method: L{Method} instance or name of raw RPC method
@type method: Method or str
@param args: call arguments
"""
# if a raw rpc method was given instead of a Method instance,
# try and find the instance for it. And if all else fails, create a
# dummy Method instance
if isinstance(method, str):
result = find_method(method)
# if result not found
if result == -1:
method = Method(DummyClass, method, method)
else:
method = result
# ensure method is available before adding
if not method.is_available(self.rt_obj):
_handle_unavailable_rpc_method(method, self.rt_obj)
self.calls.append((method, args))
def list_calls(self):
for c in self.calls:
print(c)
def call(self):
"""Execute added multicall calls
@return: the results (post-processed), in the order they were added
@rtype: tuple
"""
m = xmlrpclib.MultiCall(self.rt_obj._get_conn())
for call in self.calls:
method, args = call
rpc_call = getattr(method, "rpc_call")
getattr(m, rpc_call)(*args)
results = m()
results = tuple(results)
results_processed = []
for r, c in zip(results, self.calls):
method = c[0] # Method instance
result = process_result(method, r)
results_processed.append(result)
# assign result to class_obj
exists = hasattr(self.class_obj, method.varname)
if not exists or not inspect.ismethod(getattr(self.class_obj, method.varname)):
setattr(self.class_obj, method.varname, result)
return(tuple(results_processed))
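# A minimal usage sketch for Multicall (hypothetical object names; assumes an
# rtorrent.RTorrent connection already exists, that `torrent` was obtained from
# it, and that the listed raw rpc calls are supported by the connected client):
#
#     m = Multicall(torrent)
#     m.add("d.get_name", torrent.rpc_id)
#     m.add("d.get_down_rate", torrent.rpc_id)
#     name, down_rate = m.call()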
def call_method(class_obj, method, *args):
"""Handles single RPC calls
@param class_obj: Peer/File/Torrent/Tracker/RTorrent instance
@type class_obj: object
@param method: L{Method} instance or name of raw RPC method
@type method: Method or str
"""
if method.is_retriever():
args = args[:-1]
else:
assert args[-1] is not None, "No argument given."
if class_obj.__class__.__name__ == "RTorrent":
rt_obj = class_obj
else:
rt_obj = class_obj._rt_obj
# check if rpc method is even available
if not method.is_available(rt_obj):
_handle_unavailable_rpc_method(method, rt_obj)
m = Multicall(class_obj)
m.add(method, *args)
# only added one method, only getting one result back
ret_value = m.call()[0]
####### OBSOLETE ##########################################################
# if method.is_retriever():
# #value = process_result(method, ret_value)
# value = ret_value #MultiCall already processed the result
# else:
# # we're setting the user's input to method.varname
# # but we'll return the value that xmlrpc gives us
# value = process_result(method, args[-1])
##########################################################################
return(ret_value)
def find_method(rpc_call):
"""Return L{Method} instance associated with given RPC call"""
method_lists = [
rtorrent.methods,
rtorrent.file.methods,
rtorrent.tracker.methods,
rtorrent.peer.methods,
rtorrent.torrent.methods,
]
for l in method_lists:
for m in l:
if m.rpc_call.lower() == rpc_call.lower():
return(m)
return(-1)
def process_result(method, result):
"""Process given C{B{result}} based on flags set in C{B{method}}
@param method: L{Method} instance
@type method: Method
@param result: result to be processed (the result of given L{Method} instance)
@note: Supported Processing:
- boolean - convert ones and zeros returned by rTorrent and
convert to python boolean values
"""
# handle custom post processing function
if method.post_process_func is not None:
result = method.post_process_func(result)
# is boolean?
if method.boolean:
if result in [1, '1']:
result = True
elif result in [0, '0']:
result = False
return(result)
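# For example, a Method defined with boolean=True maps a raw client result of
# 1 or '1' to True and 0 or '0' to False before it is stored on the owning object.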
def _build_rpc_methods(class_, method_list):
"""Build glorified aliases to raw RPC methods"""
instance = None
if not inspect.isclass(class_):
instance = class_
class_ = instance.__class__
for m in method_list:
class_name = m.class_name
if class_name != class_.__name__:
continue
if class_name == "RTorrent":
caller = lambda self, arg = None, method = m:\
call_method(self, method, bool_to_int(arg))
elif class_name == "Torrent":
caller = lambda self, arg = None, method = m:\
call_method(self, method, self.rpc_id,
bool_to_int(arg))
elif class_name in ["Tracker", "File"]:
caller = lambda self, arg = None, method = m:\
call_method(self, method, self.rpc_id,
bool_to_int(arg))
elif class_name == "Peer":
caller = lambda self, arg = None, method = m:\
call_method(self, method, self.rpc_id,
bool_to_int(arg))
elif class_name == "Group":
caller = lambda arg = None, method = m: \
call_method(instance, method, bool_to_int(arg))
if m.docstring is None:
m.docstring = ""
# print(m)
docstring = """{0}
@note: Variable where the result for this method is stored: {1}.{2}""".format(
m.docstring,
class_name,
m.varname)
caller.__doc__ = docstring
for method_name in [m.method_name] + list(m.aliases):
if instance is None:
setattr(class_, method_name, caller)
else:
setattr(instance, method_name, caller)
|
gpl-3.0
|
keon/algorithms
|
algorithms/arrays/merge_intervals.py
|
1
|
2055
|
"""
In mathematics, a (real) interval is a set of real
numbers with the property that any number that lies
between two numbers in the set is also included in the set.
"""
class Interval:
"""
A set of real numbers with methods to determine if other
numbers are included in the set.
Includes related methods to merge and print interval sets.
"""
def __init__(self, start=0, end=0):
self.start = start
self.end = end
def __repr__(self):
return "Interval ({}, {})".format(self.start, self.end)
def __iter__(self):
return iter(range(self.start, self.end))
def __getitem__(self, index):
if index < 0:
return self.end + index
return self.start + index
def __len__(self):
return self.end - self.start
def __contains__(self, item):
if self.start <= item <= self.end:
return True
return False
def __eq__(self, other):
if self.start == other.start and self.end == other.end:
return True
return False
def as_list(self):
""" Return interval as list. """
return list(self)
@staticmethod
def merge(intervals):
""" Merge two intervals into one. """
out = []
for i in sorted(intervals, key=lambda i: i.start):
if out and i.start <= out[-1].end:
out[-1].end = max(out[-1].end, i.end)
else:
out += i,
return out
@staticmethod
def print_intervals(intervals):
""" Print out the intervals. """
res = []
for i in intervals:
res.append(repr(i))
print("".join(res))
def merge_intervals(intervals):
""" Merge intervals in the form of a list. """
if intervals is None:
return None
intervals.sort(key=lambda i: i[0])
out = [intervals.pop(0)]
for i in intervals:
if out[-1][-1] >= i[0]:
out[-1][-1] = max(out[-1][-1], i[-1])
else:
out.append(i)
return out
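# A small self-check sketch (example data chosen purely for illustration):
# [1, 3] and [2, 6] overlap and collapse into [1, 6], while [8, 10] stays separate.
if __name__ == "__main__":
    print(merge_intervals([[1, 3], [2, 6], [8, 10]]))  # [[1, 6], [8, 10]]
    Interval.print_intervals(
        Interval.merge([Interval(1, 3), Interval(2, 6), Interval(8, 10)]))
    # prints: Interval (1, 6)Interval (8, 10)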
|
mit
|
mlperf/training_results_v0.7
|
Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/benchmark/opperf/nd_operations/nn_optimizer_operators.py
|
2
|
2216
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
from benchmark.opperf.utils.benchmark_utils import run_op_benchmarks
from benchmark.opperf.utils.op_registry_utils import get_all_optimizer_operators
"""Performance benchmark tests for MXNet Neural Network Optimizer Update Operators.
1. Stochastic Gradient Descent (SGD)
1.1 mp_sgd_update
1.2 sgd_mom_update
1.3 signsgd_update
1.4 mp_sgd_mom_update
1.5 sgd_update
2. signum_update
3. rmspropalex_update
4. ftml_update
5. rmsprop_update
6. ftrl_update
7. adam_update
"""
def run_optimizer_operators_benchmarks(ctx=mx.cpu(), dtype='float32', profiler='native', warmup=25, runs=100):
"""Runs benchmarks with the given context and precision (dtype) for all the neural network
optimizer update operators in MXNet.
Parameters
----------
ctx: mx.ctx
Context to run benchmarks
dtype: str, default 'float32'
Precision to use for benchmarks
profiler: str, default 'native'
Profiler to use when collecting benchmark results
warmup: int, default 25
Number of times to run for warmup
runs: int, default 100
Number of runs to capture benchmark results
Returns
-------
Dictionary of results. Key -> Name of the operator, Value -> Benchmark results.
"""
# Fetch all optimizer operators
mx_optimizer_ops = get_all_optimizer_operators()
# Run benchmarks
mx_optimizer_op_results = run_op_benchmarks(mx_optimizer_ops, dtype, ctx, profiler, warmup, runs)
return mx_optimizer_op_results
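# A minimal usage sketch (assumes MXNet and the opperf benchmark utilities are
# importable; the reduced warmup/runs values are only illustrative):
if __name__ == "__main__":
    results = run_optimizer_operators_benchmarks(ctx=mx.cpu(), dtype='float32',
                                                 warmup=5, runs=10)
    print(results)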
|
apache-2.0
|
jeremycline/pulp
|
server/pulp/server/event/http.py
|
9
|
2330
|
"""
Forwards events to a HTTP call. The configuration used by this notifier
is as follows:
url
Full URL to contact with the event data. A POST request will be made to this
URL with the contents of the events in the body.
Optional 'username' and 'password' keys may also be provided; when both are present
they are sent as HTTP Basic authentication.
"""
import base64
import httplib
import logging
import threading
from pulp.server.compat import json, json_util
TYPE_ID = 'http'
_logger = logging.getLogger(__name__)
def handle_event(notifier_config, event):
# fire the actual http push function off in a separate thread to keep
# pulp from blocking or deadlocking due to the tasking subsystem
data = event.data()
_logger.info(data)
body = json.dumps(data, default=json_util.default)
thread = threading.Thread(target=_send_post, args=[notifier_config, body])
thread.setDaemon(True)
thread.start()
def _send_post(notifier_config, body):
# Basic headers
headers = {'Accept': 'application/json',
'Content-Type': 'application/json'}
# Parse the URL for the pieces we need
if 'url' not in notifier_config or not notifier_config['url']:
_logger.warn('HTTP notifier configured without a URL; cannot fire event')
return
url = notifier_config['url']
try:
scheme, empty, server, path = url.split('/', 3)
except ValueError:
_logger.warn('Improperly configured url: %(u)s' % {'u': url})
return
connection = _create_connection(scheme, server)
# Process authentication
if 'username' in notifier_config and 'password' in notifier_config:
raw = ':'.join((notifier_config['username'], notifier_config['password']))
encoded = base64.encodestring(raw)[:-1]
headers['Authorization'] = 'Basic ' + encoded
connection.request('POST', '/' + path, body=body, headers=headers)
response = connection.getresponse()
if response.status != httplib.OK:
error_msg = response.read()
_logger.warn('Error response from HTTP notifier: %(e)s' % {'e': error_msg})
connection.close()
def _create_connection(scheme, server):
if scheme.startswith('https'):
connection = httplib.HTTPSConnection(server)
else:
connection = httplib.HTTPConnection(server)
return connection
|
gpl-2.0
|
Judystudy/gooderp_addons
|
warehouse/tests/test_inventory.py
|
5
|
10903
|
# -*- coding: utf-8 -*-
from odoo.tests.common import TransactionCase
from odoo.exceptions import UserError
class TestInventory(TransactionCase):
def setUp(self):
super(TestInventory, self).setUp()
self.env.ref('core.goods_category_1').account_id = self.env.ref(
'finance.account_goods').id
self.env.ref('warehouse.wh_in_whin1').date = '2016-02-06'
self.env.ref('warehouse.wh_in_whin3').date = '2016-02-06'
self.others_in = self.browse_ref('warehouse.wh_in_whin1')
self.others_in_2 = self.browse_ref('warehouse.wh_in_whin3')
self.goods_mouse = self.browse_ref('goods.mouse')
self.sh_warehouse = self.browse_ref('warehouse.sh_stock')
# Create a temporary stock move line that transfers 1 unit of goods to the Shanghai warehouse
self.temp_mouse_in = self.env['wh.move.line'].with_context({
'type': 'in',
}).create({
'move_id': self.others_in.move_id.id,
'goods_id': self.goods_mouse.id,
'uom_id': self.goods_mouse.uom_id.id,
'uos_id': self.goods_mouse.uos_id.id,
'warehouse_dest_id': self.sh_warehouse.id,
'goods_qty': 1,
'goods_uos_qty': self.goods_mouse.anti_conversion_unit(1),
'cost_unit': 30,
'lot': 'MOUSE0001',
})
# goods                      real qty   real auxiliary qty
# 键鼠套装 (keyboard+mouse)   96         2
# 鼠标 (mouse)                1          1
# 网线 (network cable)        48         1
self.temp_mouse_in.location_id = self.env.ref('warehouse.b001_location').id
self.others_in.approve_order()
self.others_in_2.approve_order()
self.temp_mouse_in.action_done()
# Create another temporary stock move line whose quantity is 0 but whose auxiliary quantity is 1
self.temp_mouse_in_zero_qty = self.env['wh.move.line'].with_context({
'type': 'in',
}).create({
'move_id': self.others_in.move_id.id,
'goods_id': self.goods_mouse.id,
'uom_id': self.goods_mouse.uom_id.id,
'uos_id': self.goods_mouse.uos_id.id,
'warehouse_dest_id': self.sh_warehouse.id,
'goods_qty': 0,
'goods_uos_qty': 0,
'cost_unit': 30,
'lot': 'MOUSE0002',
})
self.temp_mouse_in_zero_qty.action_done()
self.inventory = self.env['wh.inventory'].create({
'warehouse_id': self.browse_ref('warehouse.hd_stock').id,
})
self.inventory.query_inventory()
def test_query_inventory(self):
# The inventory count results must match the stock query results of each goods
for line in self.inventory.line_ids:
goods_stock = line.goods_id.get_stock_qty()[0]
self.assertEqual(goods_stock.get('warehouse'),
line.warehouse_id.name)
if line.goods_id.name == u'网线': # 120 units of network cable are in transit and must be subtracted during the count
self.assertEqual(goods_stock.get('qty') - 120, line.real_qty)
else:
self.assertEqual(goods_stock.get('qty'), line.real_qty)
# When a warehouse is specified, the selected lines must belong to that warehouse
self.inventory.warehouse_id = self.sh_warehouse
self.inventory.query_inventory()
for line in self.inventory.line_ids:
self.assertEqual(line.warehouse_id, self.sh_warehouse)
# When goods are specified, the selected lines must be for those goods
self.inventory.goods = [4, self.goods_mouse.id] # u'鼠标' (mouse)
self.inventory.query_inventory()
for line in self.inventory.line_ids:
self.assertEqual(line.goods_id.name, u'鼠标')
self.inventory.unlink()
self.assertTrue(not self.inventory.exists())
def test_query_inventory_transfer_order(self):
'''The quantity counted by the inventory query should exclude in-transit transfers; here the in-transit quantity happens to equal the warehouse quantity'''
internal_order = self.env.ref('warehouse.wh_internal_whint0')
for line in internal_order.line_out_ids:
line.goods_qty = 48
inventory = self.env['wh.inventory'].create({
'warehouse_id': self.browse_ref('warehouse.hd_stock').id,
'goods': u'网线',
})
inventory.query_inventory()
def test_generate_inventory(self):
for line in self.inventory.line_ids:
if line.goods_id.name == u'键鼠套装':
keyboard_mouse = line
elif line.goods_id.name == u'鼠标':
mouse = line
else:
cable = line
# When no value is entered, onchange_qty sets lot_type to 'nothing'
mouse.onchange_qty()
self.assertEqual(mouse.lot_type, 'nothing')
# When the actual quantity is one less than the system stock, the difference is -1
mouse.inventory_qty = mouse.real_qty - 1
mouse.onchange_qty()
self.assertEqual(mouse.difference_qty, -1)
self.assertEqual(mouse.lot_type, 'out')
# When the actual quantity is one more than the system stock, the difference is 1
mouse.inventory_qty = mouse.real_qty + 1
mouse.onchange_qty()
self.assertEqual(mouse.difference_qty, 1)
self.assertEqual(mouse.lot_type, 'in')
# For goods whose serial number is forced to 1, only one unit can be counted as surplus or shortage at a time
warning = {'warning': {
'title': u'警告',
'message': u'商品上设置了序号为1,此时一次只能盘亏或盘盈一个商品数量',
}}
mouse.inventory_qty = mouse.real_qty + 2
self.assertEqual(mouse.onchange_qty(), warning)
# When the actual auxiliary quantity changes, the actual quantity should change accordingly
mouse.inventory_uos_qty = mouse.real_uos_qty + 1
mouse.onchange_uos_qty()
self.assertEqual(mouse.goods_id.conversion_unit(
mouse.inventory_uos_qty), mouse.inventory_qty)
mouse.line_role_back()
mouse.inventory_qty = mouse.real_qty + 1
mouse.onchange_qty()
cable.inventory_qty = cable.real_qty - 1
cable.onchange_qty()
# Mouse qty is now +1 and cable qty -1, generating an incoming order for the mouse and an outgoing order for the cable
self.inventory.generate_inventory()
self.assertTrue(self.inventory.out_id)
self.assertTrue(self.inventory.in_id)
# verify the goods
self.assertEqual(
self.inventory.out_id.line_out_ids.goods_id, cable.goods_id)
self.assertEqual(
self.inventory.in_id.line_in_ids.goods_id, mouse.goods_id)
# verify the quantities
self.assertEqual(self.inventory.out_id.line_out_ids.goods_qty, 1)
self.assertEqual(self.inventory.in_id.line_in_ids.goods_qty, 1)
# Re-counting requires the related incoming/outgoing orders to be unapproved
self.inventory.in_id.approve_order()
with self.assertRaises(UserError):
self.inventory.requery_inventory()
self.inventory.in_id.cancel_approved_order()
self.inventory.requery_inventory()
self.inventory.generate_inventory()
self.inventory.out_id.approve_order()
self.inventory.in_id.approve_order()
# Once the related incoming/outgoing orders are done, the inventory order should complete automatically
self.assertEqual(self.inventory.state, 'done')
# A completed order must not be deletable
with self.assertRaises(UserError):
self.inventory.unlink()
results = self.inventory.open_in()
real_results = {
'type': 'ir.actions.act_window',
'res_model': 'wh.in',
'view_mode': 'form',
'res_id': self.inventory.in_id.id,
}
self.assertEqual(results, real_results)
results = self.inventory.open_out()
real_results = {
'type': 'ir.actions.act_window',
'res_model': 'wh.out',
'view_mode': 'form',
'res_id': self.inventory.out_id.id,
}
self.assertEqual(results, real_results)
def test_check_done_state_done(self):
''' Test: check_done state == 'done' '''
mouse_line = self.browse_ref('warehouse.wh_move_line_12')
mouse_line.action_done()
for line in self.inventory.line_ids:
if line.goods_id.name == u'鼠标':
mouse = line
# When the actual quantity is one less than the system stock, the difference is -1
mouse.inventory_qty = mouse.real_qty - 1
mouse.onchange_qty()
# Mouse qty is now -1, generating an outgoing order for the mouse
self.inventory.generate_inventory()
# The mouse is lot-managed, so the outgoing line must select a lot
self.inventory.out_id.line_out_ids[0].lot_id = mouse_line.id
self.inventory.out_id.approve_order()
self.inventory.out_id.cancel_approved_order()
def test_get_difference_uos_qty(self):
''' Test: _get_difference_uos_qty '''
for line in self.inventory.line_ids:
if line.goods_id.name == u'鼠标':
mouse = line
# actual auxiliary quantity is one less
mouse.inventory_uos_qty = mouse.inventory_qty - 1
mouse.onchange_uos_qty()
self.assertEqual(mouse.difference_uos_qty, -1)
def test_check_difference_identical(self):
''' Test: check_difference_identical '''
for line in self.inventory.line_ids:
if line.goods_id.name == u'鼠标':
mouse = line
# actual auxiliary quantity is one less
mouse.inventory_uos_qty = mouse.inventory_qty - 1
mouse.onchange_uos_qty()
self.assertEqual(mouse.difference_uos_qty, -1)
# make the surplus/shortage direction deliberately inconsistent with the auxiliary-unit surplus/shortage direction
mouse.difference_qty = 1
mouse.check_difference_identical()
def test_check_done(self):
'''Checks run when approving the incoming/outgoing orders generated by inventory surplus or shortage'''
self.inventory.query_inventory()
self.inventory.generate_inventory()
def test_inventory_get_default_warehouse(self):
''' Test: get the default warehouse for an inventory count '''
self.env['wh.inventory'].create({
'date': '2016-12-30',
'goods': '鼠标',
})
def test_generate_inventory_twice(self):
'''Clicking the generate-inventory-orders button twice'''
self.inventory.query_inventory()
self.inventory.generate_inventory()
with self.assertRaises(UserError):
self.inventory.generate_inventory()
def test_inventory_line_get_difference_qty(self):
'''_get_difference_qty:difference_qty=0,difference_uos_qty!=0'''
for line in self.inventory.line_ids:
if line.goods_id.name == u'鼠标':
mouse = line
# actual auxiliary quantity is one less, so the actual quantity difference is -1
mouse.inventory_uos_qty = mouse.inventory_qty - 1
self.assertEqual(mouse.difference_qty, -1)
|
agpl-3.0
|
zzzeek/sqlalchemy
|
lib/sqlalchemy/engine/base.py
|
3
|
118014
|
# engine/base.py
# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from __future__ import with_statement
import contextlib
import sys
from .interfaces import Connectable
from .interfaces import ExceptionContext
from .util import _distill_params
from .util import _distill_params_20
from .util import TransactionalContext
from .. import exc
from .. import inspection
from .. import log
from .. import util
from ..sql import compiler
from ..sql import util as sql_util
"""Defines :class:`_engine.Connection` and :class:`_engine.Engine`.
"""
_EMPTY_EXECUTION_OPTS = util.immutabledict()
class Connection(Connectable):
"""Provides high-level functionality for a wrapped DB-API connection.
**This is the SQLAlchemy 1.x.x version** of the :class:`_engine.Connection`
class. For the :term:`2.0 style` version, which features some API
differences, see :class:`_future.Connection`.
The :class:`_engine.Connection` object is procured by calling
the :meth:`_engine.Engine.connect` method of the :class:`_engine.Engine`
object, and provides services for execution of SQL statements as well
as transaction control.
The Connection object is **not** thread-safe. While a Connection can be
shared among threads using properly synchronized access, it is still
possible that the underlying DBAPI connection may not support shared
access between threads. Check the DBAPI documentation for details.
The Connection object represents a single DBAPI connection checked out
from the connection pool. In this state, the connection pool has no effect
upon the connection, including its expiration or timeout state. For the
connection pool to properly manage connections, connections should be
returned to the connection pool (i.e. ``connection.close()``) whenever the
connection is not in use.
.. index::
single: thread safety; Connection
"""
_is_future = False
_sqla_logger_namespace = "sqlalchemy.engine.Connection"
# used by sqlalchemy.engine.util.TransactionalContext
_trans_context_manager = None
def __init__(
self,
engine,
connection=None,
close_with_result=False,
_branch_from=None,
_execution_options=None,
_dispatch=None,
_has_events=None,
_allow_revalidate=True,
):
"""Construct a new Connection."""
self.engine = engine
self.dialect = engine.dialect
self.__branch_from = _branch_from
if _branch_from:
# branching is always "from" the root connection
assert _branch_from.__branch_from is None
self._dbapi_connection = connection
self._execution_options = _execution_options
self._echo = _branch_from._echo
self.should_close_with_result = False
self.dispatch = _dispatch
self._has_events = _branch_from._has_events
else:
self._dbapi_connection = (
connection
if connection is not None
else engine.raw_connection()
)
self._transaction = self._nested_transaction = None
self.__savepoint_seq = 0
self.__in_begin = False
self.should_close_with_result = close_with_result
self.__can_reconnect = _allow_revalidate
self._echo = self.engine._should_log_info()
if _has_events is None:
# if _has_events is sent explicitly as False,
# then don't join the dispatch of the engine; we don't
# want to handle any of the engine's events in that case.
self.dispatch = self.dispatch._join(engine.dispatch)
self._has_events = _has_events or (
_has_events is None and engine._has_events
)
assert not _execution_options
self._execution_options = engine._execution_options
if self._has_events or self.engine._has_events:
self.dispatch.engine_connect(self, _branch_from is not None)
@util.memoized_property
def _message_formatter(self):
if "logging_token" in self._execution_options:
token = self._execution_options["logging_token"]
return lambda msg: "[%s] %s" % (token, msg)
else:
return None
def _log_info(self, message, *arg, **kw):
fmt = self._message_formatter
if fmt:
message = fmt(message)
self.engine.logger.info(message, *arg, **kw)
def _log_debug(self, message, *arg, **kw):
fmt = self._message_formatter
if fmt:
message = fmt(message)
self.engine.logger.debug(message, *arg, **kw)
@property
def _schema_translate_map(self):
return self._execution_options.get("schema_translate_map", None)
def schema_for_object(self, obj):
"""Return the schema name for the given schema item taking into
account current schema translate map.
"""
name = obj.schema
schema_translate_map = self._execution_options.get(
"schema_translate_map", None
)
if (
schema_translate_map
and name in schema_translate_map
and obj._use_schema_map
):
return schema_translate_map[name]
else:
return name
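# Illustrative behavior (hypothetical schema names): with
# execution_options(schema_translate_map={"per_user": "account_one"}), an
# object whose .schema is "per_user" is reported here as "account_one", while
# names missing from the map are returned unchanged.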
def _branch(self):
"""Return a new Connection which references this Connection's
engine and connection; but does not have close_with_result enabled,
and also whose close() method does nothing.
.. deprecated:: 1.4 the "branching" concept will be removed in
SQLAlchemy 2.0 as well as the "Connection.connect()" method which
is the only consumer for this.
The Core uses this very sparingly, only in the case of
custom SQL default functions that are to be INSERTed as the
primary key of a row where we need to get the value back, so we have
to invoke it distinctly - this is a very uncommon case.
Userland code accesses _branch() when the connect()
method is called. The branched connection
acts as much as possible like the parent, except that it stays
connected when a close() event occurs.
"""
return self.engine._connection_cls(
self.engine,
self._dbapi_connection,
_branch_from=self.__branch_from if self.__branch_from else self,
_execution_options=self._execution_options,
_has_events=self._has_events,
_dispatch=self.dispatch,
)
def _generate_for_options(self):
"""define connection method chaining behavior for execution_options"""
if self._is_future:
return self
else:
c = self.__class__.__new__(self.__class__)
c.__dict__ = self.__dict__.copy()
return c
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.close()
def execution_options(self, **opt):
r""" Set non-SQL options for the connection which take effect
during execution.
For a "future" style connection, this method returns this same
:class:`_future.Connection` object with the new options added.
For a legacy connection, this method returns a copy of this
:class:`_engine.Connection` which references the same underlying DBAPI
connection, but also defines the given execution options which will
take effect for a call to
:meth:`execute`. As the new :class:`_engine.Connection` references the
same underlying resource, it's usually a good idea to ensure that
the copies will be discarded immediately, which is implicit if used
as in::
result = connection.execution_options(stream_results=True).\
execute(stmt)
Note that any key/value can be passed to
:meth:`_engine.Connection.execution_options`,
and it will be stored in the
``_execution_options`` dictionary of the :class:`_engine.Connection`.
It
is suitable for usage by end-user schemes to communicate with
event listeners, for example.
The keywords that are currently recognized by SQLAlchemy itself
include all those listed under :meth:`.Executable.execution_options`,
as well as others that are specific to :class:`_engine.Connection`.
:param autocommit: Available on: Connection, statement.
When True, a COMMIT will be invoked after execution
when executed in 'autocommit' mode, i.e. when an explicit
transaction is not begun on the connection. Note that this
is **library level, not DBAPI level autocommit**. The DBAPI
connection will remain in a real transaction unless the
"AUTOCOMMIT" isolation level is used.
.. deprecated:: 1.4 The "autocommit" execution option is deprecated
and will be removed in SQLAlchemy 2.0. See
:ref:`migration_20_autocommit` for discussion.
:param compiled_cache: Available on: Connection.
A dictionary where :class:`.Compiled` objects
will be cached when the :class:`_engine.Connection`
compiles a clause
expression into a :class:`.Compiled` object. This dictionary will
supersede the statement cache that may be configured on the
:class:`_engine.Engine` itself. If set to None, caching
is disabled, even if the engine has a configured cache size.
Note that the ORM makes use of its own "compiled" caches for
some operations, including flush operations. The caching
used by the ORM internally supersedes a cache dictionary
specified here.
:param logging_token: Available on: :class:`_engine.Connection`,
:class:`_engine.Engine`.
Adds the specified string token surrounded by brackets in log
messages logged by the connection, i.e. the logging that's enabled
either via the :paramref:`_sa.create_engine.echo` flag or via the
``logging.getLogger("sqlalchemy.engine")`` logger. This allows a
per-connection or per-sub-engine token to be available which is
useful for debugging concurrent connection scenarios.
.. versionadded:: 1.4.0b2
.. seealso::
:ref:`dbengine_logging_tokens` - usage example
:paramref:`_sa.create_engine.logging_name` - adds a name to the
name used by the Python logger object itself.
:param isolation_level: Available on: :class:`_engine.Connection`.
Set the transaction isolation level for the lifespan of this
:class:`_engine.Connection` object.
Valid values include those string
values accepted by the :paramref:`_sa.create_engine.isolation_level`
parameter passed to :func:`_sa.create_engine`. These levels are
semi-database specific; see individual dialect documentation for
valid levels.
The isolation level option applies the isolation level by emitting
statements on the DBAPI connection, and **necessarily affects the
original Connection object overall**, not just the copy that is
returned by the call to :meth:`_engine.Connection.execution_options`
method. The isolation level will remain at the given setting until
the DBAPI connection itself is returned to the connection pool, i.e.
the :meth:`_engine.Connection.close` method on the original
:class:`_engine.Connection` is called,
where an event handler will emit
additional statements on the DBAPI connection in order to revert the
isolation level change.
.. warning:: The ``isolation_level`` execution option should
**not** be used when a transaction is already established, that
is, the :meth:`_engine.Connection.begin`
method or similar has been
called. A database cannot change the isolation level on a
transaction in progress, and different DBAPIs and/or
SQLAlchemy dialects may implicitly roll back or commit
the transaction, or not affect the connection at all.
.. note:: The ``isolation_level`` execution option is implicitly
reset if the :class:`_engine.Connection` is invalidated, e.g. via
the :meth:`_engine.Connection.invalidate` method, or if a
disconnection error occurs. The new connection produced after
the invalidation will not have the isolation level re-applied
to it automatically.
.. seealso::
:paramref:`_sa.create_engine.isolation_level`
- set per :class:`_engine.Engine` isolation level
:meth:`_engine.Connection.get_isolation_level`
- view current level
:ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
:ref:`PostgreSQL Transaction Isolation <postgresql_isolation_level>`
:ref:`MySQL Transaction Isolation <mysql_isolation_level>`
:ref:`SQL Server Transaction Isolation <mssql_isolation_level>`
:ref:`session_transaction_isolation` - for the ORM
:param no_parameters: When ``True``, if the final parameter
list or dictionary is totally empty, will invoke the
statement on the cursor as ``cursor.execute(statement)``,
not passing the parameter collection at all.
Some DBAPIs such as psycopg2 and mysql-python consider
percent signs as significant only when parameters are
present; this option allows code to generate SQL
containing percent signs (and possibly other characters)
that is neutral regarding whether it's executed by the DBAPI
or piped into a script that's later invoked by
command line tools.
:param stream_results: Available on: Connection, statement.
Indicate to the dialect that results should be
"streamed" and not pre-buffered, if possible. This is a limitation
of many DBAPIs. The flag is currently understood within a subset
of dialects within the PostgreSQL and MySQL categories, and
may be supported by other third party dialects as well.
.. seealso::
:ref:`engine_stream_results`
:param schema_translate_map: Available on: Connection, Engine.
A dictionary mapping schema names to schema names, that will be
applied to the :paramref:`_schema.Table.schema` element of each
:class:`_schema.Table`
encountered when SQL or DDL expression elements
are compiled into strings; the resulting schema name will be
converted based on presence in the map of the original name.
.. versionadded:: 1.1
.. seealso::
:ref:`schema_translating`
.. seealso::
:meth:`_engine.Engine.execution_options`
:meth:`.Executable.execution_options`
:meth:`_engine.Connection.get_execution_options`
""" # noqa
c = self._generate_for_options()
c._execution_options = c._execution_options.union(opt)
if self._has_events or self.engine._has_events:
self.dispatch.set_connection_execution_options(c, opt)
self.dialect.set_connection_execution_options(c, opt)
return c
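# Minimal usage sketch, added for illustration only; the table name is
# hypothetical.  It chains execution_options() to attach a per-connection
# logging token and enable streamed results for one statement.
def _example_execution_options(engine):
    from sqlalchemy import text

    with engine.connect() as conn:
        tagged = conn.execution_options(logging_token="worker-1")
        result = tagged.execution_options(stream_results=True).execute(
            text("SELECT id FROM some_table")
        )
        for row in result:
            pass  # rows are fetched incrementally where the DBAPI allows it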
def get_execution_options(self):
"""Get the non-SQL options which will take effect during execution.
.. versionadded:: 1.3
.. seealso::
:meth:`_engine.Connection.execution_options`
"""
return self._execution_options
@property
def closed(self):
"""Return True if this connection is closed."""
# note this is independent for a "branched" connection vs.
# the base
return self._dbapi_connection is None and not self.__can_reconnect
@property
def invalidated(self):
"""Return True if this connection was invalidated."""
# prior to 1.4, "invalid" was stored as a state independent of
# "closed", meaning an invalidated connection could be "closed",
# the _dbapi_connection would be None and closed=True, yet the
# "invalid" flag would stay True. This meant that there were
# three separate states (open/valid, closed/valid, closed/invalid)
# when there is really no reason for that; a connection that's
# "closed" does not need to be "invalid". So the state is now
# represented by the two facts alone.
if self.__branch_from:
return self.__branch_from.invalidated
return self._dbapi_connection is None and not self.closed
@property
def connection(self):
"""The underlying DB-API connection managed by this Connection.
.. seealso::
:ref:`dbapi_connections`
"""
if self._dbapi_connection is None:
try:
return self._revalidate_connection()
except (exc.PendingRollbackError, exc.ResourceClosedError):
raise
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
else:
return self._dbapi_connection
def get_isolation_level(self):
"""Return the current isolation level assigned to this
:class:`_engine.Connection`.
This will typically be the default isolation level as determined
by the dialect, unless the
:paramref:`.Connection.execution_options.isolation_level`
feature has been used to alter the isolation level on a
per-:class:`_engine.Connection` basis.
This method will typically perform a live SQL operation in order
to procure the current isolation level, so the value returned is the
actual level on the underlying DBAPI connection regardless of how
this state was set. Compare to the
:attr:`_engine.Connection.default_isolation_level` accessor
which returns the dialect-level setting without performing a SQL
query.
.. versionadded:: 0.9.9
.. seealso::
:attr:`_engine.Connection.default_isolation_level`
- view default level
:paramref:`_sa.create_engine.isolation_level`
- set per :class:`_engine.Engine` isolation level
:paramref:`.Connection.execution_options.isolation_level`
- set per :class:`_engine.Connection` isolation level
"""
try:
return self.dialect.get_isolation_level(self.connection)
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
@property
def default_isolation_level(self):
"""The default isolation level assigned to this
:class:`_engine.Connection`.
This is the isolation level setting that the
:class:`_engine.Connection`
has when first procured via the :meth:`_engine.Engine.connect` method.
This level stays in place until the
:paramref:`.Connection.execution_options.isolation_level` is used
to change the setting on a per-:class:`_engine.Connection` basis.
Unlike :meth:`_engine.Connection.get_isolation_level`,
this attribute is set
ahead of time from the first connection procured by the dialect,
so a SQL query is not invoked when this accessor is called.
.. versionadded:: 0.9.9
.. seealso::
:meth:`_engine.Connection.get_isolation_level`
- view current level
:paramref:`_sa.create_engine.isolation_level`
- set per :class:`_engine.Engine` isolation level
:paramref:`.Connection.execution_options.isolation_level`
- set per :class:`_engine.Connection` isolation level
"""
return self.dialect.default_isolation_level
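# Illustrative sketch, not part of the original source: contrasting the
# cached default_isolation_level attribute with the live
# get_isolation_level() call, which typically emits SQL.
def _example_isolation_levels(engine):
    with engine.connect() as conn:
        # dialect-level default captured at first connect; no SQL emitted
        default = conn.default_isolation_level
        # actual level on the underlying DBAPI connection
        current = conn.get_isolation_level()
        return default, current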
def _invalid_transaction(self):
if self.invalidated:
raise exc.PendingRollbackError(
"Can't reconnect until invalid %stransaction is rolled "
"back."
% (
"savepoint "
if self._nested_transaction is not None
else ""
),
code="8s2b",
)
else:
assert not self._is_future
raise exc.PendingRollbackError(
"This connection is on an inactive %stransaction. "
"Please rollback() fully before proceeding."
% (
"savepoint "
if self._nested_transaction is not None
else ""
),
code="8s2a",
)
def _revalidate_connection(self):
if self.__branch_from:
return self.__branch_from._revalidate_connection()
if self.__can_reconnect and self.invalidated:
if self._transaction is not None:
self._invalid_transaction()
self._dbapi_connection = self.engine.raw_connection(
_connection=self
)
return self._dbapi_connection
raise exc.ResourceClosedError("This Connection is closed")
@property
def _still_open_and_dbapi_connection_is_valid(self):
return self._dbapi_connection is not None and getattr(
self._dbapi_connection, "is_valid", False
)
@property
def info(self):
"""Info dictionary associated with the underlying DBAPI connection
referred to by this :class:`_engine.Connection`, allowing user-defined
data to be associated with the connection.
The data here will follow along with the DBAPI connection including
after it is returned to the connection pool and used again
in subsequent instances of :class:`_engine.Connection`.
"""
return self.connection.info
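# Illustrative sketch: the .info dictionary travels with the underlying
# DBAPI connection, including across returns to and checkouts from the
# pool.  The key name used here is arbitrary.
def _example_info_dict(engine):
    with engine.connect() as conn:
        conn.info["request_id"] = "abc-123"
    # a later checkout may receive the same pooled DBAPI connection,
    # in which case the key is still present
    with engine.connect() as conn:
        return conn.info.get("request_id")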
@util.deprecated_20(":meth:`.Connection.connect`")
def connect(self, close_with_result=False):
"""Returns a branched version of this :class:`_engine.Connection`.
The :meth:`_engine.Connection.close` method on the returned
:class:`_engine.Connection` can be called and this
:class:`_engine.Connection` will remain open.
This method provides usage symmetry with
:meth:`_engine.Engine.connect`, including for usage
with context managers.
"""
return self._branch()
def invalidate(self, exception=None):
"""Invalidate the underlying DBAPI connection associated with
this :class:`_engine.Connection`.
An attempt will be made to close the underlying DBAPI connection
immediately; however if this operation fails, the error is logged
but not raised. The connection is then discarded whether or not
close() succeeded.
Upon the next use (where "use" typically means using the
:meth:`_engine.Connection.execute` method or similar),
this :class:`_engine.Connection` will attempt to
procure a new DBAPI connection using the services of the
:class:`_pool.Pool` as a source of connectivity (e.g.
a "reconnection").
If a transaction was in progress (e.g. the
:meth:`_engine.Connection.begin` method has been called) when
:meth:`_engine.Connection.invalidate` method is called, at the DBAPI
level all state associated with this transaction is lost, as
the DBAPI connection is closed. The :class:`_engine.Connection`
will not allow a reconnection to proceed until the
:class:`.Transaction` object is ended, by calling the
:meth:`.Transaction.rollback` method; until that point, any attempt at
continuing to use the :class:`_engine.Connection` will raise an
:class:`~sqlalchemy.exc.InvalidRequestError`.
This is to prevent applications from accidentally
continuing ongoing transactional operations despite the
fact that the transaction has been lost due to an
invalidation.
The :meth:`_engine.Connection.invalidate` method,
just like auto-invalidation,
will at the connection pool level invoke the
:meth:`_events.PoolEvents.invalidate` event.
:param exception: an optional ``Exception`` instance that's the
reason for the invalidation. It is passed along to event handlers
and logging functions.
.. seealso::
:ref:`pool_connection_invalidation`
"""
if self.__branch_from:
return self.__branch_from.invalidate(exception=exception)
if self.invalidated:
return
if self.closed:
raise exc.ResourceClosedError("This Connection is closed")
if self._still_open_and_dbapi_connection_is_valid:
self._dbapi_connection.invalidate(exception)
self._dbapi_connection = None
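# Illustrative sketch of the behavior described above: after invalidate(),
# an open Transaction must be rolled back before the Connection will
# procure a new DBAPI connection.  Added for illustration only.
def _example_invalidate(engine):
    from sqlalchemy import text

    with engine.connect() as conn:
        trans = conn.begin()
        conn.invalidate()
        # any execute() at this point would raise PendingRollbackError
        trans.rollback()
        # a new DBAPI connection is procured transparently on next use
        conn.execute(text("SELECT 1"))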
def detach(self):
"""Detach the underlying DB-API connection from its connection pool.
E.g.::
with engine.connect() as conn:
conn.detach()
conn.execute(text("SET search_path TO schema1, schema2"))
# work with connection
# connection is fully closed (since we used "with:", can
# also call .close())
This :class:`_engine.Connection` instance will remain usable.
When closed
(or exited from a context manager context as above),
the DB-API connection will be literally closed and not
returned to its originating pool.
This method can be used to insulate the rest of an application
from a modified state on a connection (such as a transaction
isolation level or similar).
"""
self._dbapi_connection.detach()
def _autobegin(self):
self.begin()
def begin(self):
"""Begin a transaction and return a transaction handle.
The returned object is an instance of :class:`.Transaction`.
This object represents the "scope" of the transaction,
which completes when either the :meth:`.Transaction.rollback`
or :meth:`.Transaction.commit` method is called.
.. tip::
The :meth:`_engine.Connection.begin` method is invoked when using
the :meth:`_engine.Engine.begin` context manager method as well.
All documentation that refers to behaviors specific to the
:meth:`_engine.Connection.begin` method also apply to use of the
:meth:`_engine.Engine.begin` method.
Legacy use: nested calls to :meth:`.begin` on the same
:class:`_engine.Connection` will return new :class:`.Transaction`
objects that represent an emulated transaction within the scope of the
enclosing transaction, that is::
trans = conn.begin() # outermost transaction
trans2 = conn.begin() # "nested"
trans2.commit() # does nothing
trans.commit() # actually commits
Calls to :meth:`.Transaction.commit` only have an effect
when invoked via the outermost :class:`.Transaction` object, though the
:meth:`.Transaction.rollback` method of any of the
:class:`.Transaction` objects will roll back the
transaction.
.. tip::
The above "nesting" behavior is a legacy behavior specific to
:term:`1.x style` use and will be removed in SQLAlchemy 2.0. For
notes on :term:`2.0 style` use, see
:meth:`_future.Connection.begin`.
.. seealso::
:meth:`_engine.Connection.begin_nested` - use a SAVEPOINT
:meth:`_engine.Connection.begin_twophase` -
use a two phase /XID transaction
:meth:`_engine.Engine.begin` - context manager available from
:class:`_engine.Engine`
"""
if self._is_future:
assert not self.__branch_from
elif self.__branch_from:
return self.__branch_from.begin()
if self.__in_begin:
# for dialects that emit SQL within the process of
# dialect.do_begin() or dialect.do_begin_twophase(), this
# flag prevents "autobegin" from being emitted within that
# process, while allowing self._transaction to remain at None
# until it's complete.
return
elif self._transaction is None:
self._transaction = RootTransaction(self)
return self._transaction
else:
if self._is_future:
raise exc.InvalidRequestError(
"a transaction is already begun for this connection"
)
else:
return MarkerTransaction(self)
def begin_nested(self):
"""Begin a nested transaction (i.e. SAVEPOINT) and return a
transaction handle, assuming an outer transaction is already
established.
Nested transactions require SAVEPOINT support in the
underlying database. Any transaction in the hierarchy may
``commit`` and ``rollback``, however the outermost transaction
still controls the overall ``commit`` or ``rollback`` of the
transaction as a whole.
The legacy form of :meth:`_engine.Connection.begin_nested` method has
alternate behaviors based on whether or not the
:meth:`_engine.Connection.begin` method was called previously. If
:meth:`_engine.Connection.begin` was not called, then this method will
behave the same as the :meth:`_engine.Connection.begin` method and
return a :class:`.RootTransaction` object that begins and commits a
real transaction - **no savepoint is invoked**. If
:meth:`_engine.Connection.begin` **has** been called, and a
:class:`.RootTransaction` is already established, then this method
returns an instance of :class:`.NestedTransaction` which will invoke
and manage the scope of a SAVEPOINT.
.. tip::
The above mentioned behavior of
:meth:`_engine.Connection.begin_nested` is a legacy behavior
specific to :term:`1.x style` use. In :term:`2.0 style` use, the
:meth:`_future.Connection.begin_nested` method instead autobegins
the outer transaction that can be committed using
"commit-as-you-go" style; see
:meth:`_future.Connection.begin_nested` for migration details.
.. versionchanged:: 1.4.13 The behavior of
:meth:`_engine.Connection.begin_nested`
as returning a :class:`.RootTransaction` if
:meth:`_engine.Connection.begin` were not called has been restored
as was the case in 1.3.x versions; in previous 1.4.x versions, an
outer transaction would be "autobegun" but would not be committed.
.. seealso::
:meth:`_engine.Connection.begin`
:meth:`_engine.Connection.begin_twophase`
"""
if self._is_future:
assert not self.__branch_from
elif self.__branch_from:
return self.__branch_from.begin_nested()
if self._transaction is None:
if not self._is_future:
util.warn_deprecated_20(
"Calling Connection.begin_nested() in 2.0 style use will "
"return a NestedTransaction (SAVEPOINT) in all cases, "
"that will not commit the outer transaction. For code "
"that is cross-compatible between 1.x and 2.0 style use, "
"ensure Connection.begin() is called before calling "
"Connection.begin_nested()."
)
return self.begin()
else:
self._autobegin()
return NestedTransaction(self)
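# Illustrative sketch of SAVEPOINT usage: begin() establishes the outer
# transaction, begin_nested() then manages a savepoint inside it.  The
# table name is hypothetical; added for illustration only.
def _example_savepoint(engine):
    from sqlalchemy import text

    with engine.connect() as conn:
        with conn.begin():
            conn.execute(text("INSERT INTO t (x) VALUES (1)"))
            savepoint = conn.begin_nested()
            try:
                conn.execute(text("INSERT INTO t (x) VALUES (2)"))
                savepoint.commit()  # RELEASE SAVEPOINT
            except Exception:
                savepoint.rollback()  # ROLLBACK TO SAVEPOINT
        # the outer transaction commits when the with-block exits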
def begin_twophase(self, xid=None):
"""Begin a two-phase or XA transaction and return a transaction
handle.
The returned object is an instance of :class:`.TwoPhaseTransaction`,
which in addition to the methods provided by
:class:`.Transaction`, also provides a
:meth:`~.TwoPhaseTransaction.prepare` method.
:param xid: the two phase transaction id. If not supplied, a
random id will be generated.
.. seealso::
:meth:`_engine.Connection.begin`
:meth:`_engine.Connection.begin_nested`
"""
if self.__branch_from:
return self.__branch_from.begin_twophase(xid=xid)
if self._transaction is not None:
raise exc.InvalidRequestError(
"Cannot start a two phase transaction when a transaction "
"is already in progress."
)
if xid is None:
xid = self.engine.dialect.create_xid()
return TwoPhaseTransaction(self, xid)
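# Illustrative sketch of a two-phase (XA) transaction on a dialect that
# supports it; prepare() is the additional step provided by
# TwoPhaseTransaction.  Table name is hypothetical.
def _example_two_phase(engine):
    from sqlalchemy import text

    with engine.connect() as conn:
        xa = conn.begin_twophase()
        conn.execute(text("INSERT INTO t (x) VALUES (1)"))
        xa.prepare()  # phase one
        xa.commit()   # phase two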
def recover_twophase(self):
return self.engine.dialect.do_recover_twophase(self)
def rollback_prepared(self, xid, recover=False):
self.engine.dialect.do_rollback_twophase(self, xid, recover=recover)
def commit_prepared(self, xid, recover=False):
self.engine.dialect.do_commit_twophase(self, xid, recover=recover)
def in_transaction(self):
"""Return True if a transaction is in progress."""
if self.__branch_from is not None:
return self.__branch_from.in_transaction()
return self._transaction is not None and self._transaction.is_active
def in_nested_transaction(self):
"""Return True if a transaction is in progress."""
if self.__branch_from is not None:
return self.__branch_from.in_nested_transaction()
return (
self._nested_transaction is not None
and self._nested_transaction.is_active
)
def _is_autocommit(self):
return (
self._execution_options.get("isolation_level", None)
== "AUTOCOMMIT"
)
def get_transaction(self):
"""Return the current root transaction in progress, if any.
.. versionadded:: 1.4
"""
if self.__branch_from is not None:
return self.__branch_from.get_transaction()
return self._transaction
def get_nested_transaction(self):
"""Return the current nested transaction in progress, if any.
.. versionadded:: 1.4
"""
if self.__branch_from is not None:
return self.__branch_from.get_nested_transaction()
return self._nested_transaction
def _begin_impl(self, transaction):
assert not self.__branch_from
if self._echo:
self._log_info("BEGIN (implicit)")
self.__in_begin = True
if self._has_events or self.engine._has_events:
self.dispatch.begin(self)
try:
self.engine.dialect.do_begin(self.connection)
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
finally:
self.__in_begin = False
def _rollback_impl(self):
assert not self.__branch_from
if self._has_events or self.engine._has_events:
self.dispatch.rollback(self)
if self._still_open_and_dbapi_connection_is_valid:
if self._echo:
if self._is_autocommit():
self._log_info(
"ROLLBACK using DBAPI connection.rollback(), "
"DBAPI should ignore due to autocommit mode"
)
else:
self._log_info("ROLLBACK")
try:
self.engine.dialect.do_rollback(self.connection)
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
def _commit_impl(self, autocommit=False):
assert not self.__branch_from
# AUTOCOMMIT isolation-level is a dialect-specific concept, however
# if a connection has this set as the isolation level, we can skip
# the "autocommit" warning as the operation will do "autocommit"
# in any case
if autocommit and not self._is_autocommit():
util.warn_deprecated_20(
"The current statement is being autocommitted using "
"implicit autocommit, which will be removed in "
"SQLAlchemy 2.0. "
"Use the .begin() method of Engine or Connection in order to "
"use an explicit transaction for DML and DDL statements."
)
if self._has_events or self.engine._has_events:
self.dispatch.commit(self)
if self._echo:
if self._is_autocommit():
self._log_info(
"COMMIT using DBAPI connection.commit(), "
"DBAPI should ignore due to autocommit mode"
)
else:
self._log_info("COMMIT")
try:
self.engine.dialect.do_commit(self.connection)
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
def _savepoint_impl(self, name=None):
assert not self.__branch_from
if self._has_events or self.engine._has_events:
self.dispatch.savepoint(self, name)
if name is None:
self.__savepoint_seq += 1
name = "sa_savepoint_%s" % self.__savepoint_seq
if self._still_open_and_dbapi_connection_is_valid:
self.engine.dialect.do_savepoint(self, name)
return name
def _rollback_to_savepoint_impl(self, name):
assert not self.__branch_from
if self._has_events or self.engine._has_events:
self.dispatch.rollback_savepoint(self, name, None)
if self._still_open_and_dbapi_connection_is_valid:
self.engine.dialect.do_rollback_to_savepoint(self, name)
def _release_savepoint_impl(self, name):
assert not self.__branch_from
if self._has_events or self.engine._has_events:
self.dispatch.release_savepoint(self, name, None)
if self._still_open_and_dbapi_connection_is_valid:
self.engine.dialect.do_release_savepoint(self, name)
def _begin_twophase_impl(self, transaction):
assert not self.__branch_from
if self._echo:
self._log_info("BEGIN TWOPHASE (implicit)")
if self._has_events or self.engine._has_events:
self.dispatch.begin_twophase(self, transaction.xid)
if self._still_open_and_dbapi_connection_is_valid:
self.__in_begin = True
try:
self.engine.dialect.do_begin_twophase(self, transaction.xid)
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
finally:
self.__in_begin = False
def _prepare_twophase_impl(self, xid):
assert not self.__branch_from
if self._has_events or self.engine._has_events:
self.dispatch.prepare_twophase(self, xid)
if self._still_open_and_dbapi_connection_is_valid:
assert isinstance(self._transaction, TwoPhaseTransaction)
try:
self.engine.dialect.do_prepare_twophase(self, xid)
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
def _rollback_twophase_impl(self, xid, is_prepared):
assert not self.__branch_from
if self._has_events or self.engine._has_events:
self.dispatch.rollback_twophase(self, xid, is_prepared)
if self._still_open_and_dbapi_connection_is_valid:
assert isinstance(self._transaction, TwoPhaseTransaction)
try:
self.engine.dialect.do_rollback_twophase(
self, xid, is_prepared
)
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
def _commit_twophase_impl(self, xid, is_prepared):
assert not self.__branch_from
if self._has_events or self.engine._has_events:
self.dispatch.commit_twophase(self, xid, is_prepared)
if self._still_open_and_dbapi_connection_is_valid:
assert isinstance(self._transaction, TwoPhaseTransaction)
try:
self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
def _autorollback(self):
if self.__branch_from:
self.__branch_from._autorollback()
if not self.in_transaction():
self._rollback_impl()
def _warn_for_legacy_exec_format(self):
util.warn_deprecated_20(
"The connection.execute() method in "
"SQLAlchemy 2.0 will accept parameters as a single "
"dictionary or a "
"single sequence of dictionaries only. "
"Parameters passed as keyword arguments, tuples or positionally "
"oriented dictionaries and/or tuples "
"will no longer be accepted."
)
def close(self):
"""Close this :class:`_engine.Connection`.
This results in a release of the underlying database
resources, that is, the DBAPI connection referenced
internally. The DBAPI connection is typically restored
back to the connection-holding :class:`_pool.Pool` referenced
by the :class:`_engine.Engine` that produced this
:class:`_engine.Connection`. Any transactional state present on
the DBAPI connection is also unconditionally released via
the DBAPI connection's ``rollback()`` method, regardless
of any :class:`.Transaction` object that may be
outstanding with regards to this :class:`_engine.Connection`.
After :meth:`_engine.Connection.close` is called, the
:class:`_engine.Connection` is permanently in a closed state,
and will allow no further operations.
"""
if self.__branch_from:
assert not self._is_future
util.warn_deprecated_20(
"The .close() method on a so-called 'branched' connection is "
"deprecated as of 1.4, as are 'branched' connections overall, "
"and will be removed in a future release. If this is a "
"default-handling function, don't close the connection."
)
self._dbapi_connection = None
self.__can_reconnect = False
return
if self._transaction:
self._transaction.close()
skip_reset = True
else:
skip_reset = False
if self._dbapi_connection is not None:
conn = self._dbapi_connection
# as we just closed the transaction, close the connection
# pool connection without doing an additional reset
if skip_reset:
conn._close_no_reset()
else:
conn.close()
# There is a slight chance that conn.close() may have
# triggered an invalidation here in which case
# _dbapi_connection would already be None, however usually
# it will be non-None here and in a "closed" state.
self._dbapi_connection = None
self.__can_reconnect = False
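# Illustrative sketch: close() releases the DBAPI connection back to the
# pool and leaves this Connection permanently closed; the context manager
# form shown elsewhere in this module calls it implicitly.
def _example_close(engine):
    conn = engine.connect()
    try:
        pass  # ... use the connection ...
    finally:
        conn.close()
    assert conn.closed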
def scalar(self, object_, *multiparams, **params):
"""Executes and returns the first column of the first row.
The underlying result/cursor is closed after execution.
"""
return self.execute(object_, *multiparams, **params).scalar()
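# Illustrative sketch: scalar() executes the statement and returns only
# the first column of the first row, closing the result afterwards.
# Table name is hypothetical.
def _example_scalar(engine):
    from sqlalchemy import text

    with engine.connect() as conn:
        return conn.scalar(text("SELECT count(*) FROM t"))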
def execute(self, statement, *multiparams, **params):
r"""Executes a SQL statement construct and returns a
:class:`_engine.CursorResult`.
:param statement: The statement to be executed. May be
one of:
* a plain string (deprecated)
* any :class:`_expression.ClauseElement` construct that is also
a subclass of :class:`.Executable`, such as a
:func:`_expression.select` construct
* a :class:`.FunctionElement`, such as that generated
by :data:`.func`, will be automatically wrapped in
a SELECT statement, which is then executed.
* a :class:`.DDLElement` object
* a :class:`.DefaultGenerator` object
* a :class:`.Compiled` object
.. deprecated:: 2.0 passing a string to
:meth:`_engine.Connection.execute` is
deprecated and will be removed in version 2.0. Use the
:func:`_expression.text` construct with
:meth:`_engine.Connection.execute`, or the
:meth:`_engine.Connection.exec_driver_sql`
method to invoke a driver-level
SQL string.
:param \*multiparams/\**params: represent bound parameter
values to be used in the execution. Typically,
the format is either a collection of one or more
dictionaries passed to \*multiparams::
conn.execute(
table.insert(),
{"id":1, "value":"v1"},
{"id":2, "value":"v2"}
)
...or individual key/values interpreted by \**params::
conn.execute(
table.insert(), id=1, value="v1"
)
In the case that a plain SQL string is passed, and the underlying
DBAPI accepts positional bind parameters, a collection of tuples
or individual values in \*multiparams may be passed::
conn.execute(
"INSERT INTO table (id, value) VALUES (?, ?)",
(1, "v1"), (2, "v2")
)
conn.execute(
"INSERT INTO table (id, value) VALUES (?, ?)",
1, "v1"
)
Note above, the usage of a question mark "?" or other
symbol is contingent upon the "paramstyle" accepted by the DBAPI
in use, which may be any of "qmark", "named", "pyformat", "format",
"numeric". See `pep-249
<https://www.python.org/dev/peps/pep-0249/>`_ for details on
paramstyle.
To execute a textual SQL statement which uses bound parameters in a
DBAPI-agnostic way, use the :func:`_expression.text` construct.
.. deprecated:: 2.0 use of tuple or scalar positional parameters
is deprecated. All params should be dicts or sequences of dicts.
Use :meth:`.exec_driver_sql` to execute a plain string with
tuple or scalar positional parameters.
"""
if isinstance(statement, util.string_types):
util.warn_deprecated_20(
"Passing a string to Connection.execute() is "
"deprecated and will be removed in version 2.0. Use the "
"text() construct, "
"or the Connection.exec_driver_sql() method to invoke a "
"driver-level SQL string."
)
return self._exec_driver_sql(
statement,
multiparams,
params,
_EMPTY_EXECUTION_OPTS,
future=False,
)
try:
meth = statement._execute_on_connection
except AttributeError as err:
util.raise_(
exc.ObjectNotExecutableError(statement), replace_context=err
)
else:
return meth(self, multiparams, params, _EMPTY_EXECUTION_OPTS)
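# Illustrative sketch of the non-deprecated calling form documented
# above: a text() construct with a dictionary of bound parameters.
# Table name is hypothetical.
def _example_execute(engine):
    from sqlalchemy import text

    with engine.connect() as conn:
        result = conn.execute(
            text("SELECT id, value FROM t WHERE id = :id"),
            {"id": 1},
        )
        return result.fetchall()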
def _execute_function(self, func, multiparams, params, execution_options):
"""Execute a sql.FunctionElement object."""
return self._execute_clauseelement(
func.select(), multiparams, params, execution_options
)
def _execute_default(
self,
default,
multiparams,
params,
# migrate is calling this directly :(
execution_options=_EMPTY_EXECUTION_OPTS,
):
"""Execute a schema.ColumnDefault object."""
execution_options = self._execution_options.merge_with(
execution_options
)
distilled_parameters = _distill_params(self, multiparams, params)
if self._has_events or self.engine._has_events:
(
distilled_params,
event_multiparams,
event_params,
) = self._invoke_before_exec_event(
default, distilled_parameters, execution_options
)
try:
conn = self._dbapi_connection
if conn is None:
conn = self._revalidate_connection()
dialect = self.dialect
ctx = dialect.execution_ctx_cls._init_default(
dialect, self, conn, execution_options
)
except (exc.PendingRollbackError, exc.ResourceClosedError):
raise
except BaseException as e:
self._handle_dbapi_exception(e, None, None, None, None)
ret = ctx._exec_default(None, default, None)
if self.should_close_with_result:
self.close()
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(
self,
default,
event_multiparams,
event_params,
execution_options,
ret,
)
return ret
def _execute_ddl(self, ddl, multiparams, params, execution_options):
"""Execute a schema.DDL object."""
execution_options = ddl._execution_options.merge_with(
self._execution_options, execution_options
)
distilled_parameters = _distill_params(self, multiparams, params)
if self._has_events or self.engine._has_events:
(
distilled_params,
event_multiparams,
event_params,
) = self._invoke_before_exec_event(
ddl, distilled_parameters, execution_options
)
exec_opts = self._execution_options.merge_with(execution_options)
schema_translate_map = exec_opts.get("schema_translate_map", None)
dialect = self.dialect
compiled = ddl.compile(
dialect=dialect, schema_translate_map=schema_translate_map
)
ret = self._execute_context(
dialect,
dialect.execution_ctx_cls._init_ddl,
compiled,
None,
execution_options,
compiled,
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(
self,
ddl,
event_multiparams,
event_params,
execution_options,
ret,
)
return ret
def _invoke_before_exec_event(
self, elem, distilled_params, execution_options
):
if len(distilled_params) == 1:
event_multiparams, event_params = [], distilled_params[0]
else:
event_multiparams, event_params = distilled_params, {}
for fn in self.dispatch.before_execute:
elem, event_multiparams, event_params = fn(
self,
elem,
event_multiparams,
event_params,
execution_options,
)
if event_multiparams:
distilled_params = list(event_multiparams)
if event_params:
raise exc.InvalidRequestError(
"Event handler can't return non-empty multiparams "
"and params at the same time"
)
elif event_params:
distilled_params = [event_params]
else:
distilled_params = []
return distilled_params, event_multiparams, event_params
def _execute_clauseelement(
self, elem, multiparams, params, execution_options
):
"""Execute a sql.ClauseElement object."""
execution_options = elem._execution_options.merge_with(
self._execution_options, execution_options
)
distilled_params = _distill_params(self, multiparams, params)
has_events = self._has_events or self.engine._has_events
if has_events:
(
distilled_params,
event_multiparams,
event_params,
) = self._invoke_before_exec_event(
elem, distilled_params, execution_options
)
if distilled_params:
# ensure we don't retain a link to the view object for keys()
# which links to the values, which we don't want to cache
keys = sorted(distilled_params[0])
for_executemany = len(distilled_params) > 1
else:
keys = []
for_executemany = False
dialect = self.dialect
schema_translate_map = execution_options.get(
"schema_translate_map", None
)
compiled_cache = execution_options.get(
"compiled_cache", self.engine._compiled_cache
)
compiled_sql, extracted_params, cache_hit = elem._compile_w_cache(
dialect=dialect,
compiled_cache=compiled_cache,
column_keys=keys,
for_executemany=for_executemany,
schema_translate_map=schema_translate_map,
linting=self.dialect.compiler_linting | compiler.WARN_LINTING,
)
ret = self._execute_context(
dialect,
dialect.execution_ctx_cls._init_compiled,
compiled_sql,
distilled_params,
execution_options,
compiled_sql,
distilled_params,
elem,
extracted_params,
cache_hit=cache_hit,
)
if has_events:
self.dispatch.after_execute(
self,
elem,
event_multiparams,
event_params,
execution_options,
ret,
)
return ret
def _execute_compiled(
self,
compiled,
multiparams,
params,
execution_options=_EMPTY_EXECUTION_OPTS,
):
"""Execute a sql.Compiled object.
TODO: why do we have this? likely deprecate or remove
"""
execution_options = compiled.execution_options.merge_with(
self._execution_options, execution_options
)
distilled_parameters = _distill_params(self, multiparams, params)
if self._has_events or self.engine._has_events:
(
distilled_params,
event_multiparams,
event_params,
) = self._invoke_before_exec_event(
compiled, distilled_parameters, execution_options
)
dialect = self.dialect
ret = self._execute_context(
dialect,
dialect.execution_ctx_cls._init_compiled,
compiled,
distilled_parameters,
execution_options,
compiled,
distilled_parameters,
None,
None,
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(
self,
compiled,
event_multiparams,
event_params,
execution_options,
ret,
)
return ret
def _exec_driver_sql(
self, statement, multiparams, params, execution_options, future
):
execution_options = self._execution_options.merge_with(
execution_options
)
distilled_parameters = _distill_params(self, multiparams, params)
if not future:
if self._has_events or self.engine._has_events:
(
distilled_params,
event_multiparams,
event_params,
) = self._invoke_before_exec_event(
statement, distilled_parameters, execution_options
)
dialect = self.dialect
ret = self._execute_context(
dialect,
dialect.execution_ctx_cls._init_statement,
statement,
distilled_parameters,
execution_options,
statement,
distilled_parameters,
)
if not future:
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(
self,
statement,
event_multiparams,
event_params,
execution_options,
ret,
)
return ret
def _execute_20(
self,
statement,
parameters=None,
execution_options=_EMPTY_EXECUTION_OPTS,
):
args_10style, kwargs_10style = _distill_params_20(parameters)
try:
meth = statement._execute_on_connection
except AttributeError as err:
util.raise_(
exc.ObjectNotExecutableError(statement), replace_context=err
)
else:
return meth(self, args_10style, kwargs_10style, execution_options)
def exec_driver_sql(
self, statement, parameters=None, execution_options=None
):
r"""Executes a SQL statement construct and returns a
:class:`_engine.CursorResult`.
:param statement: The statement str to be executed. Bound parameters
must use the underlying DBAPI's paramstyle, such as "qmark",
"pyformat", "format", etc.
:param parameters: represent bound parameter values to be used in the
execution. The format is one of: a dictionary of named parameters,
a tuple of positional parameters, or a list containing either
dictionaries or tuples for multiple-execute support.
E.g. multiple dictionaries::
conn.exec_driver_sql(
"INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
[{"id":1, "value":"v1"}, {"id":2, "value":"v2"}]
)
Single dictionary::
conn.exec_driver_sql(
"INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
dict(id=1, value="v1")
)
Single tuple::
conn.exec_driver_sql(
"INSERT INTO table (id, value) VALUES (?, ?)",
(1, 'v1')
)
.. note:: The :meth:`_engine.Connection.exec_driver_sql` method does
not participate in the
:meth:`_events.ConnectionEvents.before_execute` and
:meth:`_events.ConnectionEvents.after_execute` events. To
intercept calls to :meth:`_engine.Connection.exec_driver_sql`, use
:meth:`_events.ConnectionEvents.before_cursor_execute` and
:meth:`_events.ConnectionEvents.after_cursor_execute`.
.. seealso::
:pep:`249`
"""
args_10style, kwargs_10style = _distill_params_20(parameters)
return self._exec_driver_sql(
statement,
args_10style,
kwargs_10style,
execution_options,
future=True,
)
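# Illustrative sketch: exec_driver_sql() passes the string and parameters
# straight through using the DBAPI's own paramstyle; "qmark" is shown
# here (e.g. SQLite), and the table name is hypothetical.
def _example_exec_driver_sql(engine):
    with engine.connect() as conn:
        conn.exec_driver_sql(
            "INSERT INTO t (id, value) VALUES (?, ?)",
            [(1, "v1"), (2, "v2")],
        )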
def _execute_context(
self,
dialect,
constructor,
statement,
parameters,
execution_options,
*args,
**kw
):
"""Create an :class:`.ExecutionContext` and execute, returning
a :class:`_engine.CursorResult`."""
branched = self
if self.__branch_from:
# if this is a "branched" connection, do everything in terms
# of the "root" connection, *except* for .close(), which is
# the only feature that branching provides
self = self.__branch_from
try:
conn = self._dbapi_connection
if conn is None:
conn = self._revalidate_connection()
context = constructor(
dialect, self, conn, execution_options, *args, **kw
)
except (exc.PendingRollbackError, exc.ResourceClosedError):
raise
except BaseException as e:
self._handle_dbapi_exception(
e, util.text_type(statement), parameters, None, None
)
if (
self._transaction
and not self._transaction.is_active
or (
self._nested_transaction
and not self._nested_transaction.is_active
)
):
self._invalid_transaction()
elif self._trans_context_manager:
TransactionalContext._trans_ctx_check(self)
if self._is_future and self._transaction is None:
self._autobegin()
context.pre_exec()
if dialect.use_setinputsizes:
context._set_input_sizes()
cursor, statement, parameters = (
context.cursor,
context.statement,
context.parameters,
)
if not context.executemany:
parameters = parameters[0]
if self._has_events or self.engine._has_events:
for fn in self.dispatch.before_cursor_execute:
statement, parameters = fn(
self,
cursor,
statement,
parameters,
context,
context.executemany,
)
if self._echo:
self._log_info(statement)
stats = context._get_cache_stats()
if not self.engine.hide_parameters:
self._log_info(
"[%s] %r",
stats,
sql_util._repr_params(
parameters, batches=10, ismulti=context.executemany
),
)
else:
self._log_info(
"[%s] [SQL parameters hidden due to hide_parameters=True]"
% (stats,)
)
evt_handled = False
try:
if context.executemany:
if self.dialect._has_events:
for fn in self.dialect.dispatch.do_executemany:
if fn(cursor, statement, parameters, context):
evt_handled = True
break
if not evt_handled:
self.dialect.do_executemany(
cursor, statement, parameters, context
)
elif not parameters and context.no_parameters:
if self.dialect._has_events:
for fn in self.dialect.dispatch.do_execute_no_params:
if fn(cursor, statement, context):
evt_handled = True
break
if not evt_handled:
self.dialect.do_execute_no_params(
cursor, statement, context
)
else:
if self.dialect._has_events:
for fn in self.dialect.dispatch.do_execute:
if fn(cursor, statement, parameters, context):
evt_handled = True
break
if not evt_handled:
self.dialect.do_execute(
cursor, statement, parameters, context
)
if self._has_events or self.engine._has_events:
self.dispatch.after_cursor_execute(
self,
cursor,
statement,
parameters,
context,
context.executemany,
)
context.post_exec()
result = context._setup_result_proxy()
if not self._is_future:
should_close_with_result = branched.should_close_with_result
if not result._soft_closed and should_close_with_result:
result._autoclose_connection = True
if (
# usually we're in a transaction so avoid relatively
# expensive / legacy should_autocommit call
self._transaction is None
and context.should_autocommit
):
self._commit_impl(autocommit=True)
# for "connectionless" execution, we have to close this
# Connection after the statement is complete.
# legacy stuff.
if should_close_with_result and context._soft_closed:
assert not self._is_future
# CursorResult already exhausted rows / has no rows.
# close us now
branched.close()
except BaseException as e:
self._handle_dbapi_exception(
e, statement, parameters, cursor, context
)
return result
def _cursor_execute(self, cursor, statement, parameters, context=None):
"""Execute a statement + params on the given cursor.
Adds appropriate logging and exception handling.
This method is used by DefaultDialect for special-case
executions, such as for sequences and column defaults.
The path of statement execution in the majority of cases
terminates at _execute_context().
"""
if self._has_events or self.engine._has_events:
for fn in self.dispatch.before_cursor_execute:
statement, parameters = fn(
self, cursor, statement, parameters, context, False
)
if self._echo:
self._log_info(statement)
self._log_info("[raw sql] %r", parameters)
try:
for fn in (
()
if not self.dialect._has_events
else self.dialect.dispatch.do_execute
):
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_execute(cursor, statement, parameters, context)
except BaseException as e:
self._handle_dbapi_exception(
e, statement, parameters, cursor, context
)
if self._has_events or self.engine._has_events:
self.dispatch.after_cursor_execute(
self, cursor, statement, parameters, context, False
)
def _safe_close_cursor(self, cursor):
"""Close the given cursor, catching exceptions
and turning into log warnings.
"""
try:
cursor.close()
except Exception:
# log the error through the connection pool's logger.
self.engine.pool.logger.error(
"Error closing cursor", exc_info=True
)
_reentrant_error = False
_is_disconnect = False
def _handle_dbapi_exception(
self, e, statement, parameters, cursor, context
):
exc_info = sys.exc_info()
is_exit_exception = util.is_exit_exception(e)
if not self._is_disconnect:
self._is_disconnect = (
isinstance(e, self.dialect.dbapi.Error)
and not self.closed
and self.dialect.is_disconnect(
e,
self._dbapi_connection if not self.invalidated else None,
cursor,
)
) or (is_exit_exception and not self.closed)
invalidate_pool_on_disconnect = not is_exit_exception
if self._reentrant_error:
util.raise_(
exc.DBAPIError.instance(
statement,
parameters,
e,
self.dialect.dbapi.Error,
hide_parameters=self.engine.hide_parameters,
dialect=self.dialect,
ismulti=context.executemany
if context is not None
else None,
),
with_traceback=exc_info[2],
from_=e,
)
self._reentrant_error = True
try:
# non-DBAPI error - if we already got a context,
# or there's no string statement, don't wrap it
should_wrap = isinstance(e, self.dialect.dbapi.Error) or (
statement is not None
and context is None
and not is_exit_exception
)
if should_wrap:
sqlalchemy_exception = exc.DBAPIError.instance(
statement,
parameters,
e,
self.dialect.dbapi.Error,
hide_parameters=self.engine.hide_parameters,
connection_invalidated=self._is_disconnect,
dialect=self.dialect,
ismulti=context.executemany
if context is not None
else None,
)
else:
sqlalchemy_exception = None
newraise = None
if (
self._has_events or self.engine._has_events
) and not self._execution_options.get(
"skip_user_error_events", False
):
ctx = ExceptionContextImpl(
e,
sqlalchemy_exception,
self.engine,
self,
cursor,
statement,
parameters,
context,
self._is_disconnect,
invalidate_pool_on_disconnect,
)
for fn in self.dispatch.handle_error:
try:
# handler returns an exception;
# call next handler in a chain
per_fn = fn(ctx)
if per_fn is not None:
ctx.chained_exception = newraise = per_fn
except Exception as _raised:
# handler raises an exception - stop processing
newraise = _raised
break
if self._is_disconnect != ctx.is_disconnect:
self._is_disconnect = ctx.is_disconnect
if sqlalchemy_exception:
sqlalchemy_exception.connection_invalidated = (
ctx.is_disconnect
)
# set up potentially user-defined value for
# invalidate pool.
invalidate_pool_on_disconnect = (
ctx.invalidate_pool_on_disconnect
)
if should_wrap and context:
context.handle_dbapi_exception(e)
if not self._is_disconnect:
if cursor:
self._safe_close_cursor(cursor)
with util.safe_reraise(warn_only=True):
self._autorollback()
if newraise:
util.raise_(newraise, with_traceback=exc_info[2], from_=e)
elif should_wrap:
util.raise_(
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
)
else:
util.raise_(exc_info[1], with_traceback=exc_info[2])
finally:
del self._reentrant_error
if self._is_disconnect:
del self._is_disconnect
if not self.invalidated:
dbapi_conn_wrapper = self._dbapi_connection
if invalidate_pool_on_disconnect:
self.engine.pool._invalidate(dbapi_conn_wrapper, e)
self.invalidate(e)
if self.should_close_with_result:
assert not self._is_future
self.close()
@classmethod
def _handle_dbapi_exception_noconnection(cls, e, dialect, engine):
exc_info = sys.exc_info()
is_disconnect = dialect.is_disconnect(e, None, None)
should_wrap = isinstance(e, dialect.dbapi.Error)
if should_wrap:
sqlalchemy_exception = exc.DBAPIError.instance(
None,
None,
e,
dialect.dbapi.Error,
hide_parameters=engine.hide_parameters,
connection_invalidated=is_disconnect,
)
else:
sqlalchemy_exception = None
newraise = None
if engine._has_events:
ctx = ExceptionContextImpl(
e,
sqlalchemy_exception,
engine,
None,
None,
None,
None,
None,
is_disconnect,
True,
)
for fn in engine.dispatch.handle_error:
try:
# handler returns an exception;
# call next handler in a chain
per_fn = fn(ctx)
if per_fn is not None:
ctx.chained_exception = newraise = per_fn
except Exception as _raised:
# handler raises an exception - stop processing
newraise = _raised
break
if sqlalchemy_exception and is_disconnect != ctx.is_disconnect:
sqlalchemy_exception.connection_invalidated = (
is_disconnect
) = ctx.is_disconnect
if newraise:
util.raise_(newraise, with_traceback=exc_info[2], from_=e)
elif should_wrap:
util.raise_(
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
)
else:
util.raise_(exc_info[1], with_traceback=exc_info[2])
def _run_ddl_visitor(self, visitorcallable, element, **kwargs):
"""run a DDL visitor.
This method is only here so that the MockConnection can change the
options given to the visitor so that "checkfirst" is skipped.
"""
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
@util.deprecated(
"1.4",
"The :meth:`_engine.Connection.transaction` "
"method is deprecated and will be "
"removed in a future release. Use the :meth:`_engine.Engine.begin` "
"context manager instead.",
)
def transaction(self, callable_, *args, **kwargs):
r"""Execute the given function within a transaction boundary.
The function is passed this :class:`_engine.Connection`
as the first argument, followed by the given \*args and \**kwargs,
e.g.::
def do_something(conn, x, y):
conn.execute(text("some statement"), {'x':x, 'y':y})
conn.transaction(do_something, 5, 10)
The operations inside the function are all invoked within the
context of a single :class:`.Transaction`.
Upon success, the transaction is committed. If an
exception is raised, the transaction is rolled back
before propagating the exception.
.. note::
The :meth:`.transaction` method is superseded by
the usage of the Python ``with:`` statement, which can
be used with :meth:`_engine.Connection.begin`::
with conn.begin():
conn.execute(text("some statement"), {'x':5, 'y':10})
As well as with :meth:`_engine.Engine.begin`::
with engine.begin() as conn:
conn.execute(text("some statement"), {'x':5, 'y':10})
.. seealso::
:meth:`_engine.Engine.begin` - engine-level transactional
context
:meth:`_engine.Engine.transaction` - engine-level version of
:meth:`_engine.Connection.transaction`
"""
kwargs["_sa_skip_warning"] = True
trans = self.begin()
try:
ret = self.run_callable(callable_, *args, **kwargs)
trans.commit()
return ret
except:
with util.safe_reraise():
trans.rollback()
@util.deprecated(
"1.4",
"The :meth:`_engine.Connection.run_callable` "
"method is deprecated and will "
"be removed in a future release. Invoke the callable function "
"directly, passing the Connection.",
)
def run_callable(self, callable_, *args, **kwargs):
r"""Given a callable object or function, execute it, passing
a :class:`_engine.Connection` as the first argument.
The given \*args and \**kwargs are passed subsequent
to the :class:`_engine.Connection` argument.
This function, along with :meth:`_engine.Engine.run_callable`,
allows a function to be run with a :class:`_engine.Connection`
or :class:`_engine.Engine` object without the need to know
which one is being dealt with.
"""
return callable_(self, *args, **kwargs)
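# Illustrative sketch of the migration path suggested by the deprecation
# notes above: instead of Connection.transaction()/run_callable(), invoke
# the callable directly inside an Engine.begin() block.  Table name is
# hypothetical.
def _example_run_directly(engine):
    from sqlalchemy import text

    def do_something(conn, x, y):
        conn.execute(
            text("INSERT INTO t (a, b) VALUES (:x, :y)"), {"x": x, "y": y}
        )

    with engine.begin() as conn:
        do_something(conn, 5, 10)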
class ExceptionContextImpl(ExceptionContext):
"""Implement the :class:`.ExceptionContext` interface."""
def __init__(
self,
exception,
sqlalchemy_exception,
engine,
connection,
cursor,
statement,
parameters,
context,
is_disconnect,
invalidate_pool_on_disconnect,
):
self.engine = engine
self.connection = connection
self.sqlalchemy_exception = sqlalchemy_exception
self.original_exception = exception
self.execution_context = context
self.statement = statement
self.parameters = parameters
self.is_disconnect = is_disconnect
self.invalidate_pool_on_disconnect = invalidate_pool_on_disconnect
class Transaction(TransactionalContext):
"""Represent a database transaction in progress.
The :class:`.Transaction` object is procured by
calling the :meth:`_engine.Connection.begin` method of
:class:`_engine.Connection`::
from sqlalchemy import create_engine
engine = create_engine("postgresql://scott:tiger@localhost/test")
connection = engine.connect()
trans = connection.begin()
connection.execute(text("insert into x (a, b) values (1, 2)"))
trans.commit()
The object provides :meth:`.rollback` and :meth:`.commit`
methods in order to control transaction boundaries. It
also implements a context manager interface so that
the Python ``with`` statement can be used with the
:meth:`_engine.Connection.begin` method::
with connection.begin():
connection.execute(text("insert into x (a, b) values (1, 2)"))
The Transaction object is **not** threadsafe.
.. seealso::
:meth:`_engine.Connection.begin`
:meth:`_engine.Connection.begin_twophase`
:meth:`_engine.Connection.begin_nested`
.. index::
single: thread safety; Transaction
"""
__slots__ = ()
_is_root = False
def __init__(self, connection):
raise NotImplementedError()
def _do_deactivate(self):
"""do whatever steps are necessary to set this transaction as
"deactive", however leave this transaction object in place as far
as the connection's state.
for a "real" transaction this should roll back the transaction
and ensure this transaction is no longer a reset agent.
this is used for nesting of marker transactions where the marker
can set the "real" transaction as rolled back, however it stays
in place.
for 2.0 we hope to remove this nesting feature.
"""
raise NotImplementedError()
@property
def _deactivated_from_connection(self):
"""True if this transaction is totally deactivated from the connection
and therefore can no longer affect its state.
"""
raise NotImplementedError()
def _do_close(self):
raise NotImplementedError()
def _do_rollback(self):
raise NotImplementedError()
def _do_commit(self):
raise NotImplementedError()
@property
def is_valid(self):
return self.is_active and not self.connection.invalidated
def close(self):
"""Close this :class:`.Transaction`.
If this transaction is the base transaction in a begin/commit
nesting, the transaction will be rolled back. Otherwise, the
method returns.
This is used to cancel a Transaction without affecting the scope of
an enclosing transaction.
"""
try:
self._do_close()
finally:
assert not self.is_active
def rollback(self):
"""Roll back this :class:`.Transaction`.
The implementation of this may vary based on the type of transaction in
use:
* For a simple database transaction (e.g. :class:`.RootTransaction`),
it corresponds to a ROLLBACK.
* For a :class:`.NestedTransaction`, it corresponds to a
"ROLLBACK TO SAVEPOINT" operation.
* For a :class:`.TwoPhaseTransaction`, DBAPI-specific methods for two
phase transactions may be used.
"""
try:
self._do_rollback()
finally:
assert not self.is_active
def commit(self):
"""Commit this :class:`.Transaction`.
The implementation of this may vary based on the type of transaction in
use:
* For a simple database transaction (e.g. :class:`.RootTransaction`),
it corresponds to a COMMIT.
* For a :class:`.NestedTransaction`, it corresponds to a
"RELEASE SAVEPOINT" operation.
* For a :class:`.TwoPhaseTransaction`, DBAPI-specific methods for two
phase transactions may be used.
"""
try:
self._do_commit()
finally:
assert not self.is_active
def _get_subject(self):
return self.connection
def _transaction_is_active(self):
return self.is_active
def _transaction_is_closed(self):
return not self._deactivated_from_connection
class MarkerTransaction(Transaction):
"""A 'marker' transaction that is used for nested begin() calls.
.. deprecated:: 1.4 future connection for 2.0 won't support this pattern.
"""
__slots__ = ("connection", "_is_active", "_transaction")
def __init__(self, connection):
assert connection._transaction is not None
if not connection._transaction.is_active:
raise exc.InvalidRequestError(
"the current transaction on this connection is inactive. "
"Please issue a rollback first."
)
assert not connection._is_future
util.warn_deprecated_20(
"Calling .begin() when a transaction is already begun, creating "
"a 'sub' transaction, is deprecated "
"and will be removed in 2.0. See the documentation section "
"'Migrating from the nesting pattern' for background on how "
"to migrate from this pattern."
)
self.connection = connection
if connection._trans_context_manager:
TransactionalContext._trans_ctx_check(connection)
if connection._nested_transaction is not None:
self._transaction = connection._nested_transaction
else:
self._transaction = connection._transaction
self._is_active = True
@property
def _deactivated_from_connection(self):
return not self.is_active
@property
def is_active(self):
return self._is_active and self._transaction.is_active
def _deactivate(self):
self._is_active = False
def _do_close(self):
# does not actually roll back the root
self._deactivate()
def _do_rollback(self):
# does roll back the root
if self._is_active:
try:
self._transaction._do_deactivate()
finally:
self._deactivate()
def _do_commit(self):
self._deactivate()
class RootTransaction(Transaction):
"""Represent the "root" transaction on a :class:`_engine.Connection`.
This corresponds to the current "BEGIN/COMMIT/ROLLBACK" that's occurring
for the :class:`_engine.Connection`. The :class:`_engine.RootTransaction`
is created by calling upon the :meth:`_engine.Connection.begin` method, and
remains associated with the :class:`_engine.Connection` throughout its
active span. The current :class:`_engine.RootTransaction` in use is
accessible via the :meth:`_engine.Connection.get_transaction` method of
:class:`_engine.Connection`.
In :term:`2.0 style` use, the :class:`_future.Connection` also employs
"autobegin" behavior that will create a new
:class:`_engine.RootTransaction` whenever a connection in a
non-transactional state is used to emit commands on the DBAPI connection.
The scope of the :class:`_engine.RootTransaction` in 2.0 style
use can be controlled using the :meth:`_future.Connection.commit` and
:meth:`_future.Connection.rollback` methods.
"""
_is_root = True
__slots__ = ("connection", "is_active")
def __init__(self, connection):
assert connection._transaction is None
if connection._trans_context_manager:
TransactionalContext._trans_ctx_check(connection)
self.connection = connection
self._connection_begin_impl()
connection._transaction = self
self.is_active = True
def _deactivate_from_connection(self):
if self.is_active:
assert self.connection._transaction is self
self.is_active = False
elif self.connection._transaction is not self:
util.warn("transaction already deassociated from connection")
@property
def _deactivated_from_connection(self):
return self.connection._transaction is not self
def _do_deactivate(self):
# called from a MarkerTransaction to cancel this root transaction.
# the transaction stays in place as connection._transaction, but
# is no longer active and is no longer the reset agent for the
# pooled connection. the connection won't support a new begin()
# until this transaction is explicitly closed, rolled back,
# or committed.
assert self.connection._transaction is self
if self.is_active:
self._connection_rollback_impl()
# handle case where a savepoint was created inside of a marker
# transaction that refers to a root. nested has to be cancelled
# also.
if self.connection._nested_transaction:
self.connection._nested_transaction._cancel()
self._deactivate_from_connection()
def _connection_begin_impl(self):
self.connection._begin_impl(self)
def _connection_rollback_impl(self):
self.connection._rollback_impl()
def _connection_commit_impl(self):
self.connection._commit_impl()
def _close_impl(self, try_deactivate=False):
try:
if self.is_active:
self._connection_rollback_impl()
if self.connection._nested_transaction:
self.connection._nested_transaction._cancel()
finally:
if self.is_active or try_deactivate:
self._deactivate_from_connection()
if self.connection._transaction is self:
self.connection._transaction = None
assert not self.is_active
assert self.connection._transaction is not self
def _do_close(self):
self._close_impl()
def _do_rollback(self):
self._close_impl(try_deactivate=True)
def _do_commit(self):
if self.is_active:
assert self.connection._transaction is self
try:
self._connection_commit_impl()
finally:
# whether or not commit succeeds, cancel any
# nested transactions, make this transaction "inactive"
# and remove it as a reset agent
if self.connection._nested_transaction:
self.connection._nested_transaction._cancel()
self._deactivate_from_connection()
# ...however only remove as the connection's current transaction
# if commit succeeded. otherwise it stays on so that a rollback
# needs to occur.
self.connection._transaction = None
else:
if self.connection._transaction is self:
self.connection._invalid_transaction()
else:
raise exc.InvalidRequestError("This transaction is inactive")
assert not self.is_active
assert self.connection._transaction is not self
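# Illustrative sketch, not part of SQLAlchemy itself: in 2.0-style usage a
# RootTransaction is "autobegun" by the first statement executed on the
# connection and is ended with Connection.commit() or Connection.rollback().
# The in-memory SQLite URL, table name and helper name are assumptions made
# only for this example.
def _example_root_transaction_autobegin():
    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://", future=True)
    with engine.connect() as conn:
        # the first execute() autobegins a RootTransaction on the connection
        conn.execute(text("create table t (x integer)"))
        conn.execute(text("insert into t (x) values (1)"))
        # commit() emits COMMIT and ends the autobegun RootTransaction
        conn.commit()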
class NestedTransaction(Transaction):
"""Represent a 'nested', or SAVEPOINT transaction.
The :class:`.NestedTransaction` object is created by calling the
:meth:`_engine.Connection.begin_nested` method of
:class:`_engine.Connection`.
When using :class:`.NestedTransaction`, the semantics of "begin" /
"commit" / "rollback" are as follows:
* the "begin" operation corresponds to the "BEGIN SAVEPOINT" command, where
the savepoint is given an explicit name that is part of the state
of this object.
* The :meth:`.NestedTransaction.commit` method corresponds to a
"RELEASE SAVEPOINT" operation, using the savepoint identifier associated
with this :class:`.NestedTransaction`.
* The :meth:`.NestedTransaction.rollback` method corresponds to a
"ROLLBACK TO SAVEPOINT" operation, using the savepoint identifier
associated with this :class:`.NestedTransaction`.
The rationale for mimicking the semantics of an outer transaction in
terms of savepoints is so that code may deal with a "savepoint" transaction
and an "outer" transaction in an agnostic way.
.. seealso::
:ref:`session_begin_nested` - ORM version of the SAVEPOINT API.
"""
__slots__ = ("connection", "is_active", "_savepoint", "_previous_nested")
def __init__(self, connection):
assert connection._transaction is not None
if connection._trans_context_manager:
TransactionalContext._trans_ctx_check(connection)
self.connection = connection
self._savepoint = self.connection._savepoint_impl()
self.is_active = True
self._previous_nested = connection._nested_transaction
connection._nested_transaction = self
def _deactivate_from_connection(self, warn=True):
if self.connection._nested_transaction is self:
self.connection._nested_transaction = self._previous_nested
elif warn:
util.warn(
"nested transaction already deassociated from connection"
)
@property
def _deactivated_from_connection(self):
return self.connection._nested_transaction is not self
def _cancel(self):
# called by RootTransaction when the outer transaction is
# committed, rolled back, or closed to cancel all savepoints
# without any action being taken
self.is_active = False
self._deactivate_from_connection()
if self._previous_nested:
self._previous_nested._cancel()
def _close_impl(self, deactivate_from_connection, warn_already_deactive):
try:
if self.is_active and self.connection._transaction.is_active:
self.connection._rollback_to_savepoint_impl(self._savepoint)
finally:
self.is_active = False
if deactivate_from_connection:
self._deactivate_from_connection(warn=warn_already_deactive)
assert not self.is_active
if deactivate_from_connection:
assert self.connection._nested_transaction is not self
def _do_deactivate(self):
self._close_impl(False, False)
def _do_close(self):
self._close_impl(True, False)
def _do_rollback(self):
self._close_impl(True, True)
def _do_commit(self):
if self.is_active:
try:
self.connection._release_savepoint_impl(self._savepoint)
finally:
# nested trans becomes inactive on failed release
# unconditionally. this prevents it from trying to
# emit SQL when it rolls back.
self.is_active = False
# but only de-associate from connection if it succeeded
self._deactivate_from_connection()
else:
if self.connection._nested_transaction is self:
self.connection._invalid_transaction()
else:
raise exc.InvalidRequestError(
"This nested transaction is inactive"
)
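# Illustrative sketch, not part of SQLAlchemy itself: a NestedTransaction is a
# SAVEPOINT, so rolling it back emits ROLLBACK TO SAVEPOINT while the enclosing
# transaction stays usable.  The database URL, table name and helper name are
# assumptions; the backend must support savepoints.
def _example_nested_transaction_savepoint():
    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    with engine.connect() as conn:
        with conn.begin():                       # outer (root) transaction
            savepoint = conn.begin_nested()      # SAVEPOINT
            conn.execute(text("insert into x (a, b) values (1, 2)"))
            savepoint.rollback()                 # ROLLBACK TO SAVEPOINT
            conn.execute(text("insert into x (a, b) values (3, 4)"))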
class TwoPhaseTransaction(RootTransaction):
"""Represent a two-phase transaction.
A new :class:`.TwoPhaseTransaction` object may be procured
using the :meth:`_engine.Connection.begin_twophase` method.
The interface is the same as that of :class:`.Transaction`
with the addition of the :meth:`prepare` method.
"""
__slots__ = ("connection", "is_active", "xid", "_is_prepared")
def __init__(self, connection, xid):
self._is_prepared = False
self.xid = xid
super(TwoPhaseTransaction, self).__init__(connection)
def prepare(self):
"""Prepare this :class:`.TwoPhaseTransaction`.
After a PREPARE, the transaction can be committed.
"""
if not self.is_active:
raise exc.InvalidRequestError("This transaction is inactive")
self.connection._prepare_twophase_impl(self.xid)
self._is_prepared = True
def _connection_begin_impl(self):
self.connection._begin_twophase_impl(self)
def _connection_rollback_impl(self):
self.connection._rollback_twophase_impl(self.xid, self._is_prepared)
def _connection_commit_impl(self):
self.connection._commit_twophase_impl(self.xid, self._is_prepared)
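# Illustrative sketch, not part of SQLAlchemy itself: the two-phase flow is
# begin_twophase() -> work -> prepare() -> commit().  The PostgreSQL URL and
# table name are assumptions, and the backend must be configured for prepared
# transactions.
def _example_two_phase_commit():
    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    with engine.connect() as conn:
        xact = conn.begin_twophase()
        conn.execute(text("insert into x (a, b) values (1, 2)"))
        xact.prepare()   # phase one: PREPARE TRANSACTION
        xact.commit()    # phase two: COMMIT PREPARED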
class Engine(Connectable, log.Identified):
"""
Connects a :class:`~sqlalchemy.pool.Pool` and
:class:`~sqlalchemy.engine.interfaces.Dialect` together to provide a
source of database connectivity and behavior.
This is the **SQLAlchemy 1.x version** of :class:`_engine.Engine`. For
the :term:`2.0 style` version, which includes some API differences,
see :class:`_future.Engine`.
An :class:`_engine.Engine` object is instantiated publicly using the
:func:`~sqlalchemy.create_engine` function.
.. seealso::
:doc:`/core/engines`
:ref:`connections_toplevel`
"""
_execution_options = _EMPTY_EXECUTION_OPTS
_has_events = False
_connection_cls = Connection
_sqla_logger_namespace = "sqlalchemy.engine.Engine"
_is_future = False
_schema_translate_map = None
def __init__(
self,
pool,
dialect,
url,
logging_name=None,
echo=None,
query_cache_size=500,
execution_options=None,
hide_parameters=False,
):
self.pool = pool
self.url = url
self.dialect = dialect
if logging_name:
self.logging_name = logging_name
self.echo = echo
self.hide_parameters = hide_parameters
if query_cache_size != 0:
self._compiled_cache = util.LRUCache(
query_cache_size, size_alert=self._lru_size_alert
)
else:
self._compiled_cache = None
log.instance_logger(self, echoflag=echo)
if execution_options:
self.update_execution_options(**execution_options)
def _lru_size_alert(self, cache):
if self._should_log_info:
self.logger.info(
"Compiled cache size pruning from %d items to %d. "
"Increase cache size to reduce the frequency of pruning.",
len(cache),
cache.capacity,
)
@property
def engine(self):
return self
def clear_compiled_cache(self):
"""Clear the compiled cache associated with the dialect.
This applies **only** to the built-in cache that is established
via the :paramref:`_engine.create_engine.query_cache_size` parameter.
It will not impact any dictionary caches that were passed via the
:paramref:`.Connection.execution_options.query_cache` parameter.
.. versionadded:: 1.4
"""
if self._compiled_cache:
self._compiled_cache.clear()
def update_execution_options(self, **opt):
r"""Update the default execution_options dictionary
of this :class:`_engine.Engine`.
The given keys/values in \**opt are added to the
default execution options that will be used for
all connections. The initial contents of this dictionary
can be sent via the ``execution_options`` parameter
to :func:`_sa.create_engine`.
.. seealso::
:meth:`_engine.Connection.execution_options`
:meth:`_engine.Engine.execution_options`
"""
self._execution_options = self._execution_options.union(opt)
self.dispatch.set_engine_execution_options(self, opt)
self.dialect.set_engine_execution_options(self, opt)
def execution_options(self, **opt):
"""Return a new :class:`_engine.Engine` that will provide
:class:`_engine.Connection` objects with the given execution options.
The returned :class:`_engine.Engine` remains related to the original
:class:`_engine.Engine` in that it shares the same connection pool and
other state:
* The :class:`_pool.Pool` used by the new :class:`_engine.Engine`
is the
same instance. The :meth:`_engine.Engine.dispose`
method will replace
the connection pool instance for the parent engine as well
as this one.
* Event listeners are "cascaded" - meaning, the new
:class:`_engine.Engine`
inherits the events of the parent, and new events can be associated
with the new :class:`_engine.Engine` individually.
* The logging configuration and logging_name are copied from the parent
:class:`_engine.Engine`.
The intent of the :meth:`_engine.Engine.execution_options` method is
to implement "sharding" schemes where multiple :class:`_engine.Engine`
objects refer to the same connection pool, but are differentiated
by options that would be consumed by a custom event::
primary_engine = create_engine("mysql://")
shard1 = primary_engine.execution_options(shard_id="shard1")
shard2 = primary_engine.execution_options(shard_id="shard2")
Above, the ``shard1`` engine serves as a factory for
:class:`_engine.Connection`
objects that will contain the execution option
``shard_id=shard1``, and ``shard2`` will produce
:class:`_engine.Connection`
objects that contain the execution option ``shard_id=shard2``.
An event handler can consume the above execution option to perform
a schema switch or other operation, given a connection. Below
we emit a MySQL ``use`` statement to switch databases, at the same
time keeping track of which database we've established using the
:attr:`_engine.Connection.info` dictionary,
which gives us a persistent
storage space that follows the DBAPI connection::
from sqlalchemy import event
from sqlalchemy.engine import Engine
shards = {"default": "base", "shard_1": "db1", "shard_2": "db2"}
@event.listens_for(Engine, "before_cursor_execute")
def _switch_shard(conn, cursor, stmt,
params, context, executemany):
shard_id = conn._execution_options.get('shard_id', "default")
current_shard = conn.info.get("current_shard", None)
if current_shard != shard_id:
cursor.execute("use %s" % shards[shard_id])
conn.info["current_shard"] = shard_id
.. seealso::
:meth:`_engine.Connection.execution_options`
- update execution options
on a :class:`_engine.Connection` object.
:meth:`_engine.Engine.update_execution_options`
- update the execution
options for a given :class:`_engine.Engine` in place.
:meth:`_engine.Engine.get_execution_options`
"""
return self._option_cls(self, opt)
def get_execution_options(self):
"""Get the non-SQL options which will take effect during execution.
.. versionadded:: 1.3
.. seealso::
:meth:`_engine.Engine.execution_options`
"""
return self._execution_options
@property
def name(self):
"""String name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
in use by this :class:`Engine`."""
return self.dialect.name
@property
def driver(self):
"""Driver name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
in use by this :class:`Engine`."""
return self.dialect.driver
echo = log.echo_property()
def __repr__(self):
return "Engine(%r)" % (self.url,)
def dispose(self):
"""Dispose of the connection pool used by this
:class:`_engine.Engine`.
This has the effect of fully closing all **currently checked in**
database connections. Connections that are still checked out
will **not** be closed, however they will no longer be associated
with this :class:`_engine.Engine`,
so when they are closed individually,
eventually the :class:`_pool.Pool` which they are associated with will
be garbage collected and they will be closed out fully, if
not already closed on checkin.
A new connection pool is created immediately after the old one has
been disposed. This new pool, like all SQLAlchemy connection pools,
does not make any actual connections to the database until one is
first requested, so as long as the :class:`_engine.Engine`
isn't used again,
no new connections will be made.
.. seealso::
:ref:`engine_disposal`
"""
self.pool.dispose()
self.pool = self.pool.recreate()
self.dispatch.engine_disposed(self)
def _execute_default(
self, default, multiparams=(), params=util.EMPTY_DICT
):
with self.connect() as conn:
return conn._execute_default(default, multiparams, params)
@contextlib.contextmanager
def _optional_conn_ctx_manager(self, connection=None):
if connection is None:
with self.connect() as conn:
yield conn
else:
yield connection
class _trans_ctx(object):
def __init__(self, conn, transaction, close_with_result):
self.conn = conn
self.transaction = transaction
self.close_with_result = close_with_result
def __enter__(self):
self.transaction.__enter__()
return self.conn
def __exit__(self, type_, value, traceback):
try:
self.transaction.__exit__(type_, value, traceback)
finally:
if not self.close_with_result:
self.conn.close()
def begin(self, close_with_result=False):
"""Return a context manager delivering a :class:`_engine.Connection`
with a :class:`.Transaction` established.
E.g.::
with engine.begin() as conn:
conn.execute(
text("insert into table (x, y, z) values (1, 2, 3)")
)
conn.execute(text("my_special_procedure(5)"))
Upon successful operation, the :class:`.Transaction`
is committed. If an error is raised, the :class:`.Transaction`
is rolled back.
Legacy use only: the ``close_with_result`` flag is normally ``False``,
and indicates that the :class:`_engine.Connection` will be closed when
the operation is complete. When set to ``True``, it indicates the
:class:`_engine.Connection` is in "single use" mode, where the
:class:`_engine.CursorResult` returned by the first call to
:meth:`_engine.Connection.execute` will close the
:class:`_engine.Connection` when that :class:`_engine.CursorResult` has
exhausted all result rows.
.. seealso::
:meth:`_engine.Engine.connect` - procure a
:class:`_engine.Connection` from
an :class:`_engine.Engine`.
:meth:`_engine.Connection.begin` - start a :class:`.Transaction`
for a particular :class:`_engine.Connection`.
"""
if self._connection_cls._is_future:
conn = self.connect()
else:
conn = self.connect(close_with_result=close_with_result)
try:
trans = conn.begin()
except:
with util.safe_reraise():
conn.close()
return Engine._trans_ctx(conn, trans, close_with_result)
@util.deprecated(
"1.4",
"The :meth:`_engine.Engine.transaction` "
"method is deprecated and will be "
"removed in a future release. Use the :meth:`_engine.Engine.begin` "
"context "
"manager instead.",
)
def transaction(self, callable_, *args, **kwargs):
r"""Execute the given function within a transaction boundary.
The function is passed a :class:`_engine.Connection` newly procured
from :meth:`_engine.Engine.connect` as the first argument,
followed by the given \*args and \**kwargs.
e.g.::
def do_something(conn, x, y):
conn.execute(text("some statement"), {'x':x, 'y':y})
engine.transaction(do_something, 5, 10)
The operations inside the function are all invoked within the
context of a single :class:`.Transaction`.
Upon success, the transaction is committed. If an
exception is raised, the transaction is rolled back
before propagating the exception.
.. note::
The :meth:`.transaction` method is superseded by
the usage of the Python ``with:`` statement, which can
be used with :meth:`_engine.Engine.begin`::
with engine.begin() as conn:
conn.execute(text("some statement"), {'x':5, 'y':10})
.. seealso::
:meth:`_engine.Engine.begin` - engine-level transactional
context
:meth:`_engine.Connection.transaction`
- connection-level version of
:meth:`_engine.Engine.transaction`
"""
kwargs["_sa_skip_warning"] = True
with self.connect() as conn:
return conn.transaction(callable_, *args, **kwargs)
@util.deprecated(
"1.4",
"The :meth:`_engine.Engine.run_callable` "
"method is deprecated and will be "
"removed in a future release. Use the :meth:`_engine.Engine.begin` "
"context manager instead.",
)
def run_callable(self, callable_, *args, **kwargs):
r"""Given a callable object or function, execute it, passing
a :class:`_engine.Connection` as the first argument.
The given \*args and \**kwargs are passed subsequent
to the :class:`_engine.Connection` argument.
This function, along with :meth:`_engine.Connection.run_callable`,
allows a function to be run with a :class:`_engine.Connection`
or :class:`_engine.Engine` object without the need to know
which one is being dealt with.
"""
kwargs["_sa_skip_warning"] = True
with self.connect() as conn:
return conn.run_callable(callable_, *args, **kwargs)
def _run_ddl_visitor(self, visitorcallable, element, **kwargs):
with self.begin() as conn:
conn._run_ddl_visitor(visitorcallable, element, **kwargs)
@util.deprecated_20(
":meth:`_engine.Engine.execute`",
alternative="All statement execution in SQLAlchemy 2.0 is performed "
"by the :meth:`_engine.Connection.execute` method of "
":class:`_engine.Connection`, "
"or in the ORM by the :meth:`.Session.execute` method of "
":class:`.Session`.",
)
def execute(self, statement, *multiparams, **params):
"""Executes the given construct and returns a
:class:`_engine.CursorResult`.
The arguments are the same as those used by
:meth:`_engine.Connection.execute`.
Here, a :class:`_engine.Connection` is acquired using the
:meth:`_engine.Engine.connect` method, and the statement executed
with that connection. The returned :class:`_engine.CursorResult`
is flagged
such that when the :class:`_engine.CursorResult` is exhausted and its
underlying cursor is closed, the :class:`_engine.Connection`
created here
will also be closed, which allows its associated DBAPI connection
resource to be returned to the connection pool.
"""
connection = self.connect(close_with_result=True)
return connection.execute(statement, *multiparams, **params)
@util.deprecated_20(
":meth:`_engine.Engine.scalar`",
alternative="All statement execution in SQLAlchemy 2.0 is performed "
"by the :meth:`_engine.Connection.execute` method of "
":class:`_engine.Connection`, "
"or in the ORM by the :meth:`.Session.execute` method of "
":class:`.Session`; the :meth:`_future.Result.scalar` "
"method can then be "
"used to return a scalar result.",
)
def scalar(self, statement, *multiparams, **params):
"""Executes and returns the first column of the first row.
The underlying result/cursor is closed after execution.
"""
return self.execute(statement, *multiparams, **params).scalar()
def _execute_clauseelement(
self,
elem,
multiparams=None,
params=None,
execution_options=_EMPTY_EXECUTION_OPTS,
):
connection = self.connect(close_with_result=True)
return connection._execute_clauseelement(
elem, multiparams, params, execution_options
)
def _execute_compiled(
self,
compiled,
multiparams,
params,
execution_options=_EMPTY_EXECUTION_OPTS,
):
connection = self.connect(close_with_result=True)
return connection._execute_compiled(
compiled, multiparams, params, execution_options
)
def connect(self, close_with_result=False):
"""Return a new :class:`_engine.Connection` object.
The :class:`_engine.Connection` object is a facade that uses a DBAPI
connection internally in order to communicate with the database. This
connection is procured from the connection-holding :class:`_pool.Pool`
referenced by this :class:`_engine.Engine`. When the
:meth:`_engine.Connection.close` method of the
:class:`_engine.Connection` object
is called, the underlying DBAPI connection is then returned to the
connection pool, where it may be used again in a subsequent call to
:meth:`_engine.Engine.connect`.
"""
return self._connection_cls(self, close_with_result=close_with_result)
@util.deprecated(
"1.4",
"The :meth:`_engine.Engine.table_names` "
"method is deprecated and will be "
"removed in a future release. Please refer to "
":meth:`_reflection.Inspector.get_table_names`.",
)
def table_names(self, schema=None, connection=None):
"""Return a list of all table names available in the database.
:param schema: Optional, retrieve names from a non-default schema.
:param connection: Optional, use a specified connection.
"""
with self._optional_conn_ctx_manager(connection) as conn:
insp = inspection.inspect(conn)
return insp.get_table_names(schema)
@util.deprecated(
"1.4",
"The :meth:`_engine.Engine.has_table` "
"method is deprecated and will be "
"removed in a future release. Please refer to "
":meth:`_reflection.Inspector.has_table`.",
)
def has_table(self, table_name, schema=None):
"""Return True if the given backend has a table of the given name.
.. seealso::
:ref:`metadata_reflection_inspector` - detailed schema inspection
using the :class:`_reflection.Inspector` interface.
:class:`.quoted_name` - used to pass quoting information along
with a schema identifier.
"""
with self._optional_conn_ctx_manager(None) as conn:
insp = inspection.inspect(conn)
return insp.has_table(table_name, schema=schema)
def _wrap_pool_connect(self, fn, connection):
dialect = self.dialect
try:
return fn()
except dialect.dbapi.Error as e:
if connection is None:
Connection._handle_dbapi_exception_noconnection(
e, dialect, self
)
else:
util.raise_(
sys.exc_info()[1], with_traceback=sys.exc_info()[2]
)
def raw_connection(self, _connection=None):
"""Return a "raw" DBAPI connection from the connection pool.
The returned object is a proxied version of the DBAPI
connection object used by the underlying driver in use.
The object will have all the same behavior as the real DBAPI
connection, except that its ``close()`` method will result in the
connection being returned to the pool, rather than being closed
for real.
This method provides direct DBAPI connection access for
special situations when the API provided by
:class:`_engine.Connection`
is not needed. When a :class:`_engine.Connection` object is already
present, the DBAPI connection is available using
the :attr:`_engine.Connection.connection` accessor.
.. seealso::
:ref:`dbapi_connections`
"""
return self._wrap_pool_connect(self.pool.connect, _connection)
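# Illustrative sketch, not part of SQLAlchemy itself: raw_connection() hands
# back the pooled DBAPI connection proxy; its close() returns the connection
# to the pool rather than closing it for real.  The SQLite URL and helper name
# are assumptions.
def _example_raw_connection():
    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")
    dbapi_conn = engine.raw_connection()
    try:
        cursor = dbapi_conn.cursor()
        cursor.execute("select 1")
        print(cursor.fetchall())
        cursor.close()
    finally:
        dbapi_conn.close()   # returned to the pool, not really closed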
class OptionEngineMixin(object):
_sa_propagate_class_events = False
def __init__(self, proxied, execution_options):
self._proxied = proxied
self.url = proxied.url
self.dialect = proxied.dialect
self.logging_name = proxied.logging_name
self.echo = proxied.echo
self._compiled_cache = proxied._compiled_cache
self.hide_parameters = proxied.hide_parameters
log.instance_logger(self, echoflag=self.echo)
# note: this will propagate events that are assigned to the parent
# engine after this OptionEngine is created. Since we share
# the events of the parent we also disallow class-level events
# to apply to the OptionEngine class directly.
#
# the other way this can work would be to transfer existing
# events only, using:
# self.dispatch._update(proxied.dispatch)
#
# that might be more appropriate however it would be a behavioral
# change for logic that assigns events to the parent engine and
# would like it to take effect for the already-created sub-engine.
self.dispatch = self.dispatch._join(proxied.dispatch)
self._execution_options = proxied._execution_options
self.update_execution_options(**execution_options)
def _get_pool(self):
return self._proxied.pool
def _set_pool(self, pool):
self._proxied.pool = pool
pool = property(_get_pool, _set_pool)
def _get_has_events(self):
return self._proxied._has_events or self.__dict__.get(
"_has_events", False
)
def _set_has_events(self, value):
self.__dict__["_has_events"] = value
_has_events = property(_get_has_events, _set_has_events)
class OptionEngine(OptionEngineMixin, Engine):
pass
Engine._option_cls = OptionEngine
|
mit
|
xxvii27/gae-pushtest
|
bp_includes/external/requests/packages/chardet/chardistribution.py
|
2755
|
9226
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
JIS_TYPICAL_DISTRIBUTION_RATIO)
from .compat import wrap_ord
ENOUGH_DATA_THRESHOLD = 1024
SURE_YES = 0.99
SURE_NO = 0.01
MINIMUM_DATA_THRESHOLD = 3
class CharDistributionAnalysis:
def __init__(self):
# Mapping table to get frequency order from char order (get from
# GetOrder())
self._mCharToFreqOrder = None
self._mTableSize = None # Size of above table
# This is a constant value which varies from language to language,
# used in calculating confidence. See
# http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
# for further detail.
self._mTypicalDistributionRatio = None
self.reset()
def reset(self):
"""reset analyser, clear any state"""
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
self._mTotalChars = 0 # Total characters encountered
# The number of characters whose frequency order is less than 512
self._mFreqChars = 0
def feed(self, aBuf, aCharLen):
"""feed a character with known length"""
if aCharLen == 2:
# we only care about 2-byte characters in our distribution analysis
order = self.get_order(aBuf)
else:
order = -1
if order >= 0:
self._mTotalChars += 1
# order is valid
if order < self._mTableSize:
if 512 > self._mCharToFreqOrder[order]:
self._mFreqChars += 1
def get_confidence(self):
"""return confidence based on existing data"""
# if we didn't receive any character in our consideration range,
# return negative answer
if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
return SURE_NO
if self._mTotalChars != self._mFreqChars:
r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars)
* self._mTypicalDistributionRatio))
if r < SURE_YES:
return r
# normalize confidence (we don't want to be 100% sure)
return SURE_YES
def got_enough_data(self):
# It is not necessary to receive all data to draw a conclusion.
# For charset detection, a certain amount of data is enough
return self._mTotalChars > ENOUGH_DATA_THRESHOLD
def get_order(self, aBuf):
# We do not handle characters based on the original encoding string,
# but convert this encoding string to a number, here called order.
# This allows multiple encodings of a language to share one frequency
# table.
return -1
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = EUCTWCharToFreqOrder
self._mTableSize = EUCTW_TABLE_SIZE
self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for euc-TW encoding, we are interested
# first byte range: 0xc4 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = wrap_ord(aBuf[0])
if first_char >= 0xC4:
return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
else:
return -1
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = EUCKRCharToFreqOrder
self._mTableSize = EUCKR_TABLE_SIZE
self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for euc-KR encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = wrap_ord(aBuf[0])
if first_char >= 0xB0:
return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
else:
return -1
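# Illustration only, not part of chardet: feed a few two-byte EUC-KR sequences
# into the analyser and read back a confidence value.  The byte pairs are
# arbitrary examples taken from the valid 0xb0--0xfe / 0xa1--0xfe ranges.
def _example_euckr_confidence():
    analyser = EUCKRDistributionAnalysis()
    for pair in ('\xb0\xa1', '\xb0\xa2', '\xb1\xe2', '\xc7\xd1'):
        analyser.feed(pair, 2)
    return analyser.get_confidence()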
class GB2312DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = GB2312CharToFreqOrder
self._mTableSize = GB2312_TABLE_SIZE
self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for GB2312 encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if (first_char >= 0xB0) and (second_char >= 0xA1):
return 94 * (first_char - 0xB0) + second_char - 0xA1
else:
return -1
class Big5DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = Big5CharToFreqOrder
self._mTableSize = BIG5_TABLE_SIZE
self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for big5 encoding, we are interested
# first byte range: 0xa4 -- 0xfe
# second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if first_char >= 0xA4:
if second_char >= 0xA1:
return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
else:
return 157 * (first_char - 0xA4) + second_char - 0x40
else:
return -1
class SJISDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = JISCharToFreqOrder
self._mTableSize = JIS_TABLE_SIZE
self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for sjis encoding, we are interested
# first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
# second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if (first_char >= 0x81) and (first_char <= 0x9F):
order = 188 * (first_char - 0x81)
elif (first_char >= 0xE0) and (first_char <= 0xEF):
order = 188 * (first_char - 0xE0 + 31)
else:
return -1
order = order + second_char - 0x40
if second_char > 0x7F:
order = -1
return order
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = JISCharToFreqOrder
self._mTableSize = JIS_TABLE_SIZE
self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for euc-JP encoding, we are interested
# first byte range: 0xa0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
char = wrap_ord(aBuf[0])
if char >= 0xA0:
return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xa1
else:
return -1
|
lgpl-3.0
|
charleslaw/pyfft
|
fft.py
|
1
|
6159
|
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 29 00:09:26 2010
@author: Charles Law
"""
import math
def fft(fin, inverse):
nfft = len(fin)
twiddles, factors = fft_alloc(nfft, inverse)
fout = []
for i in xrange(nfft):
fout.append((0, 0))
fout_ind_start = 0
fin_ind_start = 0
in_stride = 1
fft_work(fout, fout_ind_start, fin, fin_ind_start, 1, in_stride, factors,
twiddles, nfft)
return fout
def fft_work(fout, fout_ind, f, f_ind, fstride, in_stride, factors,
twiddles, nfft_orig):
p = factors[0][0] # the radix
m = factors[0][1] # stage's fft length/p
factors = factors[1:]
fout_beg = fout_ind
fout_end = fout_ind + p*m
if m == 1 :
dowhile_if = 1
while ( dowhile_if ):
fout[fout_ind] = f[f_ind]
f_ind = f_ind + fstride*in_stride
fout_ind = fout_ind + 1
if fout_ind == fout_end:
dowhile_if = 0
else:
dowhile_if = 1
while ( dowhile_if ):
# recursive call:
# DFT of size m*p performed by doing
# p instances of smaller DFTs of size m,
# each one takes a decimated version of the input
fft_work(fout, fout_ind , f, f_ind, fstride*p, in_stride,
factors, twiddles, nfft_orig)
f_ind = f_ind + fstride*in_stride
#}while( (fout += m) != fout_end )
fout_ind = fout_ind + m
if ( fout_ind == fout_end ):
dowhile_if = 0
fout_ind = fout_beg
# recombine the p smaller DFTs
if p == 2:
fft_bfly2(fout, fout_ind, fstride, twiddles, m)
elif p == 3:
fft_bfly3(fout, fout_ind, fstride, twiddles, m)
else:
fft_bfly_generic(fout, fout_ind, fstride, twiddles, m, p, nfft_orig)
return fout
def fft_bfly2(fout, fout_ind, fstride, twiddles, m):
tw1_ind = 0
fout2_ind = fout_ind + m
dowhile_if = 1
while(dowhile_if):
t = _mult ( fout[fout2_ind], twiddles[tw1_ind] )
tw1_ind = tw1_ind + fstride
fout[fout2_ind] = _sub( fout[fout_ind], t )
fout[fout_ind] = _addto( fout[fout_ind], t )
fout2_ind = fout2_ind + 1
fout_ind = fout_ind + 1
m -= 1
if not(m):
dowhile_if = 0
return fout
def fft_bfly3(fout, fout_ind, fstride, twiddles, m):
k = m
m2 = 2*m
scratch = [(0, 0), (0, 0), (0, 0), (0, 0)]
epi3_i = twiddles[fstride*m][1]
tw1_ind = 0
tw2_ind = tw1_ind
dowhile_if = 1
while (dowhile_if):
scratch[1] = _mult( fout[fout_ind+m], twiddles[tw1_ind] )
scratch[2] = _mult( fout[fout_ind+m2], twiddles[tw2_ind] )
scratch[3] = _add( scratch[1], scratch[2] )
scratch[0] = _sub( scratch[1], scratch[2] )
tw1_ind = tw1_ind + fstride
tw2_ind = tw2_ind + fstride*2
fout[fout_ind+m] = ( fout[fout_ind][0] - (scratch[3][0])/2, \
fout[fout_ind][1] - (scratch[3][1])/2 )
scratch[0] = _mult_by_scalar( scratch[0], epi3_i )
fout[fout_ind] = _addto( fout[fout_ind], scratch[3] )
fout[fout_ind+m2] = ( fout[fout_ind+m][0] + scratch[0][1], \
fout[fout_ind+m][1] - scratch[0][0] )
fout[fout_ind+m] = ( fout[fout_ind+m][0] - scratch[0][1], \
fout[fout_ind+m][1] + scratch[0][0] )
fout_ind = fout_ind + 1
k -= 1
if not(k):
dowhile_if = 0
return fout
def fft_bfly_generic(fout, fout_ind, fstride, twiddles, m, p, nfft_orig):
n_orig = nfft_orig
# initialize scratch
scratch = []
for q1 in xrange(p): #( q1=0 ; q1<p ; ++q1 )
scratch.append(0)
for u in xrange(m): #( u=0; u<m; ++u )
k = u
for q1 in xrange(p): #( q1=0 ; q1<p ; ++q1 )
scratch[q1] = fout[fout_ind+k]
k = k + m
k = u
for q1 in xrange(p):
twidx = 0
fout[fout_ind+k] = scratch[0]
for q in xrange(1, p):
twidx = twidx + fstride * k
if (twidx >= n_orig):
twidx = twidx - nfft_orig
t = _mult( scratch[q], twiddles[twidx] )
fout[fout_ind+k] = _addto( fout[fout_ind+k], t )
k = k + m
return fout
def fft_alloc(nfft, inverse):
twiddles = []
for i in xrange(nfft):
phase = -2*math.pi*float(i) / float(nfft)
if (inverse):
phase = phase * float(-1)
twiddles.append(fft_cexp(phase))
factors = fft_factor(nfft)
return twiddles, factors
def fft_cexp(phase):
x = (math.cos(phase), math.sin(phase))
return x
def fft_factor(n):
facbuf = []
p = 4
floor_sqrt = math.floor( math.sqrt( float(n) ) )
# factor out powers of 4, powers of 2, then any remaining primes
dowhile_test = 1
while (dowhile_test):
while n % p:
if p == 4:
p = 2
elif p == 2:
p = 3
else:
p = p + 2
if (p > floor_sqrt):
p = n # no more factors, skip to end
n = n / p
facbuf.append((p, n))
if not(n > 1):
dowhile_test = 0
return facbuf
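# Quick illustration (not part of the original module): fft_factor returns one
# (radix, remaining_length) pair per stage, e.g. a 6-point FFT is a radix-2
# stage over length-3 sub-FFTs followed by a radix-3 stage.
def _example_fft_factor():
    assert fft_factor(6) == [(2, 3), (3, 1)]
    assert fft_factor(8) == [(4, 2), (2, 1)]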
def _mult( a, b ):
return ( a[0]*b[0] - a[1]*b[1], a[0]*b[1] + a[1]*b[0] )
def _sub( a, b ):
return ( a[0]-b[0], a[1]-b[1] )
def _add( a, b ):
return ( a[0] + b[0], a[1] + b[1] )
def _addto( res , a):
return ( res[0] + a[0], res[1] + a[1] )
def _mult_by_scalar( c, s ):
return ( c[0] * s, c[1] * s)
def main():
fin = [(0, 0), (1, 0), (1, 0), (1, 0), (1, 0), (0, 0)]
inverse = 0
print fft(fin, inverse)
if __name__ == '__main__':
main()
|
bsd-3-clause
|
onesfreedom/pybuilder
|
src/unittest/python/plugins/python/pyfix_unittest_plugin_tests.py
|
6
|
1779
|
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from mock import Mock, call, patch
from pybuilder.core import Project
from pybuilder.plugins.python.pyfix_unittest_plugin import init_test_source_directory
class InitTestSourceDirectoryTests(TestCase):
@patch('pybuilder.plugins.python.pyfix_plugin_impl.execute_tests_matching')
def test_should_set_pyfix_dependency(self, mock_execute_tests_matching):
mock_project = Mock(Project)
init_test_source_directory(mock_project)
mock_project.build_depends_on.assert_called_with('pyfix')
@patch('pybuilder.plugins.python.pyfix_plugin_impl.execute_tests_matching')
def test_should_set_default_properties(self, mock_execute_tests_matching):
mock_project = Mock(Project)
init_test_source_directory(mock_project)
self.assertEquals(mock_project.set_property_if_unset.call_args_list,
[call('dir_source_unittest_python', 'src/unittest/python'),
call('pyfix_unittest_module_glob', '*_pyfix_tests'),
call('pyfix_unittest_file_suffix', None)])
|
apache-2.0
|
mrunge/horizon
|
openstack_dashboard/dashboards/admin/volumes/volume_types/forms.py
|
4
|
5903
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard.api import cinder
class ManageQosSpecAssociation(forms.SelfHandlingForm):
qos_spec_choice = forms.ChoiceField(
label=_("QoS Spec to be associated"),
help_text=_("Choose associated QoS Spec."))
def __init__(self, request, *args, **kwargs):
super(ManageQosSpecAssociation, self).__init__(request,
*args,
**kwargs)
qos_spec_field = self.fields['qos_spec_choice']
qos_spec_field.choices = \
self.populate_qos_spec_choices()
# pre-select the current qos spec, if exists
# if no association exists, the selected entry will be "None"
# since it is index 0 of the choice box
current_qos_spec = self.initial["cur_qos_spec_id"]
if current_qos_spec:
qos_spec_field.initial = current_qos_spec
def populate_qos_spec_choices(self):
# populate qos spec list box
qos_specs = self.initial["qos_specs"]
qos_spec_list = [(qos_spec.id, qos_spec.name)
for qos_spec in qos_specs]
# 'none' is always listed first
qos_spec_list.insert(0, ("-1", _("None")))
return qos_spec_list
def clean_qos_spec_choice(self):
# ensure that new association isn't the same as current association
cleaned_new_spec_id = self.cleaned_data.get('qos_spec_choice')
cur_spec_id = self.initial['cur_qos_spec_id']
found_error = False
if cur_spec_id:
# new = current
if cur_spec_id == cleaned_new_spec_id:
found_error = True
else:
# no current association
if cleaned_new_spec_id == '-1':
# new = current
found_error = True
if found_error:
raise forms.ValidationError(
_('New associated QoS Spec must be different than '
'the current associated QoS Spec.'))
return cleaned_new_spec_id
def handle(self, request, data):
vol_type_id = self.initial['type_id']
new_qos_spec_id = data['qos_spec_choice']
# Update QOS Spec association information
try:
# NOTE - volume types can only be associated with
# ONE QOS Spec at a time
# first we need to un-associate the current QOS Spec, if it exists
cur_qos_spec_id = self.initial['cur_qos_spec_id']
if cur_qos_spec_id:
qos_spec = cinder.qos_spec_get(request,
cur_qos_spec_id)
cinder.qos_spec_disassociate(request,
qos_spec,
vol_type_id)
# now associate with new QOS Spec, if user wants one associated
if new_qos_spec_id != '-1':
qos_spec = cinder.qos_spec_get(request,
new_qos_spec_id)
cinder.qos_spec_associate(request,
qos_spec,
vol_type_id)
messages.success(request,
_('Successfully updated QoS Spec association.'))
return True
except Exception:
exceptions.handle(request,
_('Error updating QoS Spec association.'))
return False
class EditQosSpecConsumer(forms.SelfHandlingForm):
consumer_choice = forms.ChoiceField(
label=_("QoS Spec Consumer"),
choices=cinder.CONSUMER_CHOICES,
help_text=_("Choose consumer for this QoS Spec."))
def __init__(self, request, *args, **kwargs):
super(EditQosSpecConsumer, self).__init__(request, *args, **kwargs)
consumer_field = self.fields['consumer_choice']
qos_spec = self.initial["qos_spec"]
consumer_field.initial = qos_spec.consumer
def clean_consumer_choice(self):
# ensure that new consumer isn't the same as current consumer
qos_spec = self.initial['qos_spec']
cleaned_new_consumer = self.cleaned_data.get('consumer_choice')
old_consumer = qos_spec.consumer
if cleaned_new_consumer == old_consumer:
raise forms.ValidationError(
_('QoS Spec consumer value must be different than '
'the current consumer value.'))
return cleaned_new_consumer
def handle(self, request, data):
qos_spec_id = self.initial['qos_spec_id']
new_consumer = data['consumer_choice']
# Update QOS Spec consumer information
try:
cinder.qos_spec_set_keys(request,
qos_spec_id,
{'consumer': new_consumer})
messages.success(request,
_('Successfully modified QoS Spec consumer.'))
return True
except Exception:
exceptions.handle(request, _('Error editing QoS Spec consumer.'))
return False
|
apache-2.0
|
magicrub/MissionPlanner
|
Lib/site-packages/scipy/cluster/doc/ex1.py
|
61
|
1214
|
from scipy import *
from scipy.cluster import vq
def cluster_data(data,cluster_cnt,iter=20,thresh=1e-5):
""" Group data into a number of common clusters
data -- 2D array of data points. Each point is a row in the array.
cluster_cnt -- The number of clusters to use
iter -- number of iterations to use for kmeans algorithm
thresh -- distortion threshold for kmeans algorithm
return -- list of 2D arrays. Each array contains the data points
that belong to a specific cluster.
Uses kmeans algorithm to find the clusters.
"""
wh_data = vq.whiten(data)
code_book,dist = vq.kmeans(wh_data,cluster_cnt,iter,thresh)
code_ids, distortion = vq.vq(wh_data,code_book)
clusters = []
for i in range(len(code_book)):
cluster = compress(code_ids == i,data,0)
clusters.append(cluster)
return clusters
if __name__ == "__main__":
data = array(((400, 79, 5.4),
(180, 76, 4.5),
(28, 25, 30.),
(270, 81, 5.0),
(185, 78, 4.6)))
clusters = cluster_data(data,2)
for i in range(len(clusters)):
print 'cluster %d:' % i
print clusters[i]
|
gpl-3.0
|
raybrshen/pattern_recognition
|
noise_detection/tools/filter_noise.py
|
1
|
3213
|
__author__ = 'ray'
import os
from subprocess import Popen,PIPE
import wave
import numpy as np
def run_comm(comm):
process = Popen(comm, stdout=PIPE)
(std_out,std_err) = process.communicate()
exit_code = process.wait()
if std_out: print std_out
if std_err!=None: print std_err
def run_comm_save_output(comm, out_file):
process = Popen(comm, stdout=PIPE)
(std_out,std_err) = process.communicate()
exit_code = process.wait()
of = open(out_file, 'w')
of.write(std_out)
of.close()
if std_err!=None: print std_err
in_wav_path = 'frame_based-clap_cough_speech_env/test/test_clap'
out_wav_path = 'frame_based-clap_cough_speech_env/test/test_clap_filter.wav'
smile_exe = 'frame_based-clap_cough_speech_env/tools/SMILExtract'
config_file = 'frame_based-clap_cough_speech_env/tools/step_30_align_mid_without_energy.conf'
scale_exe = 'frame_based-clap_cough_speech_env/tools/svm-scale'
range_file = 'frame_based-clap_cough_speech_env/model/training_set.range'
predict_exe = 'frame_based-clap_cough_speech_env/tools/svm-predict'
model_file = 'frame_based-clap_cough_speech_env/model/training_set.model'
# extract features with openSMILE
assert os.path.isfile(in_wav_path+'.wav')
comm = [smile_exe,'-C',config_file,'-I',in_wav_path+'.wav',"-O",in_wav_path+'.rawdata']
run_comm(comm)
# scale the features
assert os.path.isfile(in_wav_path+'.rawdata')
comm = [scale_exe, '-r', range_file, in_wav_path+'.rawdata']
run_comm_save_output(comm, in_wav_path+".scale")
os.remove(in_wav_path+'.rawdata')
# predict result from features
assert os.path.isfile(in_wav_path+'.scale')
comm = [predict_exe, '-b', '1', in_wav_path+'.scale', model_file, in_wav_path+'.label']
run_comm(comm)
os.remove(in_wav_path+'.scale')
# open audio file
in_wav = wave.open(in_wav_path+'.wav','rb')
nf = in_wav.getnframes()
# set parameters of the output wave the same as input wave
out_wav = wave.open(out_wav_path, 'wb')
out_wav.setparams(in_wav.getparams())
# process label file to get noise frames
assert os.path.isfile(in_wav_path+'.label')
# per-sample class-probability accumulator: one row per audio sample, one column per class
pmat = np.zeros((nf,4))
with open(in_wav_path+'.label') as f:
contents = f.readlines()
contents = contents[1:]
# start the probability overlapping process
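# each openSMILE frame is hopped by 441 samples and its class probabilities
# are accumulated over a 1323-sample window [left, right] (three hops wide),
# so overlapping windows vote on every sample; the per-sample label is taken
# as the argmax of the accumulated probabilities further below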
left = -661
right = 661
for idx, line in enumerate(contents):
#print line
label,p3,p2,p1,p0 = line.split(' ')
#pmat[idx*441:(idx+3)*441-1,:] = pmat[idx*441:(idx+3)*441-1,:] + np.array([float(p0),float(p1),float(p2),float(p3)])
tmp_left=0 if left<0 else left
tmp_right=nf if right>nf else right
pmat[tmp_left:tmp_right+1,:] = pmat[tmp_left:tmp_right+1,:] + np.array([float(p0),float(p1),float(p2),float(p3)])
left += 441
right += 441
os.remove(in_wav_path+'.label')
# get label of each frame
labels = np.argmax(pmat, 1)
# extract signal value from bytes
in_signal = np.fromstring(in_wav.readframes(-1), 'Int16')
# assert consistency of frame number
assert len(in_signal)==len(labels)
# write to output wave only non-noise frames
out_wav.writeframes(in_signal[labels!=2].tostring())
# close files
in_wav.close()
out_wav.close()
assert os.path.isfile(out_wav_path)
print 'done!'
|
apache-2.0
|
lcy03406/protobuf
|
python/google/protobuf/internal/api_implementation.py
|
229
|
3438
|
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
This module is the central entity that determines which implementation of the
API is used.
"""
__author__ = '[email protected] (Petar Petrov)'
import os
# This environment variable can be used to switch to a certain implementation
# of the Python API. Right now only 'python' and 'cpp' are valid values. Any
# other value will be ignored.
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
'python')
if _implementation_type != 'python':
# For now, by default use the pure-Python implementation.
# The code below checks if the C extension is available and
# uses it if it is available.
_implementation_type = 'cpp'
## Determine automatically which implementation to use.
#try:
# from google.protobuf.internal import cpp_message
# _implementation_type = 'cpp'
#except ImportError, e:
# _implementation_type = 'python'
# This environment variable can be used to switch between the two
# 'cpp' implementations. Right now only 1 and 2 are valid values. Any
# other value will be ignored.
_implementation_version_str = os.getenv(
'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION',
'1')
if _implementation_version_str not in ('1', '2'):
raise ValueError(
"unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: '" +
_implementation_version_str + "' (supported versions: 1, 2)"
)
_implementation_version = int(_implementation_version_str)
# Usage of this function is discouraged. Clients shouldn't care which
# implementation of the API is in use. Note that there is no guarantee
# that differences between APIs will be maintained.
# Please don't use this function if possible.
def Type():
return _implementation_type
# See comment on 'Type' above.
def Version():
return _implementation_version
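# Illustrative sketch, not part of protobuf: the environment variables above
# are read once at import time, so they must be set before anything imports
# protobuf, e.g. ``PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python python
# my_script.py``.  Afterwards Type() and Version() report what was selected.
def _example_report_implementation():
    print('implementation: %s, version %d' % (Type(), Version()))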
|
bsd-3-clause
|